Read date values from a JSON file into a Java HashMap

I have a JSON file that looks like this:
{
  "calendar": {
    "dateToDayId": {
      "2016-07-14": 290356,
      "2016-08-26": 380486,
      "2016-09-07": 417244,
      "2016-08-15": 354271,
      "2016-07-25": 311762
    },
    "dishIdToMealId": {
      "1228578": 474602,
      "1228585": 474602,
      "1228586": 474602,
      ...... // more fields
    }
  }
}
I'm trying to read the <date, number> pairs under dateToDayId into a HashMap<Date, Long> in Java. First I created a JsonInfo class which holds a Calendar class object. The Calendar class object in turn holds all the other classes, including DateToDayId, DishToMealId and so on. My DateToDayId class looks like this (I'm using Jackson to parse the JSON file):
package jsonfields;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import deserializers.CustomDateDeserializer;

import java.util.Date;
import java.util.TreeMap;

public class DateToDayId {
    @JsonDeserialize(using = CustomDateDeserializer.class)
    private TreeMap<Date, Long> dateToDayMappings;

    public TreeMap<Date, Long> getDateToDayMappings() {
        return dateToDayMappings;
    }

    public void setDateToDayMappings(TreeMap<Date, Long> dateToDayMappings) {
        this.dateToDayMappings = dateToDayMappings;
    }

    @Override
    public String toString() {
        return "DateToDayId{" +
                "dateToDayMappings=" + dateToDayMappings +
                '}';
    }
}
My Main class looks like this:
import java.io.*;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class Main {
    public static void main(String[] args) {
        ObjectMapper objectMapper = new ObjectMapper();
        try {
            InputStreamReader inputStreamReader = new InputStreamReader(new FileInputStream("path\\to\\file\\test.json"));
            JsonInfo jsonInfo = objectMapper.readValue(inputStreamReader, JsonInfo.class);
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
When I run it, I get the following exception:
com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException: Unrecognized field "2016-07-14" (class jsonfields.DateToDayId), not marked as ignorable (one known property: "dateToDayMappings"])
at [Source: (InputStreamReader); line: 1, column: 48] (through reference chain: JsonInfo["calendar"]->jsonfields.Calendar["dateToDayId"]->jsonfields.DateToDayId["2016-07-14"])
How can I fix this?

Your class mapping is wrong. Calendar needs a map field named dateToDayId, matching the JSON property. The modified code below should do what you need.
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Date;
import java.util.TreeMap;

public class JSONMapTest {
    public static void main(String[] args) {
        ObjectMapper objectMapper = new ObjectMapper();
        try {
            InputStreamReader inputStreamReader = new InputStreamReader(new FileInputStream("C:\\ws\\test\\test.json"));
            JsonInfo jsonInfo = objectMapper.readValue(inputStreamReader, JsonInfo.class);
            System.out.println(jsonInfo);
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
class JsonInfo {
    Calendar calendar;

    public Calendar getCalendar() {
        return calendar;
    }

    public void setCalendar(Calendar calendar) {
        this.calendar = calendar;
    }
}

class Calendar {
    private TreeMap<Date, Long> dateToDayId;

    public TreeMap<Date, Long> getDateToDayId() {
        return dateToDayId;
    }

    public void setDateToDayId(TreeMap<Date, Long> dateToDayId) {
        this.dateToDayId = dateToDayId;
    }

    @Override
    public String toString() {
        return "Calendar{" +
                "dateToDayId=" + dateToDayId +
                '}';
    }
}
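As a side note: if you only need the inner map, a shorter route is to skip the wrapper classes and convert just the dateToDayId node. A minimal sketch (assuming Jackson 2.x, whose default date format accepts yyyy-MM-dd keys; the class and file names here are illustrative):

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.util.Date;
import java.util.HashMap;

public class DirectMapRead {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Read the whole document as a tree, then convert only the node we need.
        JsonNode root = mapper.readTree(new File("test.json"));
        HashMap<Date, Long> dateToDayId = mapper.convertValue(
                root.path("calendar").path("dateToDayId"),
                new TypeReference<HashMap<Date, Long>>() {});
        System.out.println(dateToDayId);
    }
}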

Related

java.lang.NullPointerException: Cannot invoke "org.json.JSONObject.getJSONObject(String)" because "a1.JsonConfig.configProperties" is null

I am writing the code below for a login page, using JSON for data-driven testing, but it throws: Cannot invoke "org.json.JSONObject.getJSONObject(String)" because "a1.JsonConfig.configProperties" is null
package a1;

import java.nio.file.Paths;
import java.nio.file.Files;
import org.json.JSONObject;

public class JsonConfig {
    static JSONObject configProperties;

    public static void loadConfig() {
        try {
            configProperties = new JSONObject(
                    new String(Files.readAllBytes(Paths.get("src\\main\\java\\Config.json"))));
        } catch (Exception e) {
            System.out.println("unable to load the json file path");
        }
    }

    public static String getUserName(String userName) {
        return configProperties.getJSONObject("users").getJSONObject(userName).getString("username");
    }

    public static String getPassword(String userName) {
        return configProperties.getJSONObject("users").getJSONObject(userName).getString("password");
    }
}
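The error means configProperties is still null when a getter runs: most likely loadConfig() was never called before getUserName/getPassword, or the file failed to load and the catch block swallowed the real exception. A minimal sketch of a guard (loading lazily here is an assumption about the intended usage):

public static String getUserName(String userName) {
    if (configProperties == null) {
        loadConfig(); // make sure the config is loaded before first use
    }
    return configProperties.getJSONObject("users").getJSONObject(userName).getString("username");
}

Printing the caught exception (or rethrowing it) in loadConfig, instead of only logging a generic message, would also make the underlying cause visible.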

How to read a properties file in Java whose values are themselves key-value pairs

If a properties file contains a value like the one below, how can I read it directly into a Map?
user={'name':'test','age':'23','place':'london'}
Thanks in advance!
You can inject values into a Map from the properties file using the @Value annotation, like this:
@Value("#{${user}}")
private Map<String, String> user;
The entry in your application.properties must be:
user = {"name":"test","age":"23","place":"London"}
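Note that the #{...} wrapper in the annotation is a Spring Expression Language (SpEL) expression, so this approach only works on fields of Spring-managed beans; plain Java cannot use @Value.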
Alternatively, given a plain test.properties file:
name=test
age=23
place=london
you can read it with java.util.Properties:
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Properties;

public class ReadPropertiesFile {
    public static void main(String[] args) {
        try {
            File file = new File("test.properties");
            FileInputStream fileInput = new FileInputStream(file);
            Properties properties = new Properties();
            properties.load(fileInput);
            fileInput.close();
            Enumeration<Object> enuKeys = properties.keys();
            while (enuKeys.hasMoreElements()) {
                String key = (String) enuKeys.nextElement();
                String value = properties.getProperty(key);
                System.out.println(key + ": " + value);
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
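Running it prints each pair (iteration order is not guaranteed):
name: test
age: 23
place: london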
Hope this helps. :)
This will read a properties file and load it into a Properties object.
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class PropertiesConfig {
    private Properties prop;

    private PropertiesConfig() {
        super();
        init();
    }

    private static class PropertiesInstance {
        private static PropertiesConfig instance = null;

        public static PropertiesConfig getInstance() {
            if (null == instance) {
                instance = new PropertiesConfig();
            }
            return instance;
        }
    }

    public static PropertiesConfig getInstance() {
        return PropertiesInstance.getInstance();
    }

    private void init() {
        prop = new Properties();
        try (InputStream input = getClass().getResourceAsStream("/sample_config.properties")) {
            // load a properties file
            prop.load(input);
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }

    public String getProperty(String key) {
        return prop.getProperty(key);
    }
}
Now you can use any JSON library to convert a value into a Map, since your value looks like JSON. A code example with Jackson (mapper, LOGGER and JsonTransformException are assumed to exist in the surrounding class):
public static Map<String, Object> convertFromJsonToMap(String json) throws JsonTransformException {
    try {
        return mapper.readValue(json, new TypeReference<HashMap<String, Object>>() {});
    } catch (Exception e) {
        LOGGER.error("Error " + json, e);
        throw new JsonTransformException("Error in json parse", e);
    }
}
So something like this will do:
convertFromJsonToMap(PropertiesConfig.getInstance().getProperty("user"));
Looks like you have a JSON representation of your map as the value. Once you read the value as a String in Java, you can use Gson to convert it to a map:
Gson gson = new Gson();
String json = <YOUR_STRING>;
Map<String, Object> map = gson.fromJson(json, new TypeToken<Map<String, Object>>() {}.getType());

dynamic header CSVParser

The code below parses CSV records when the header is known in advance, so we can declare the array values for FILE_HEADER_MAPPING:
CSVFormat csvFileFormat = CSVFormat.DEFAULT.withHeader(FILE_HEADER_MAPPING);
FileReader fileReader = new FileReader("file");
CSVParser csvFileParser = new CSVParser(fileReader, csvFileFormat);
Iterable<CSVRecord> records = csvFileParser.getRecords();
But how do I create the CSVParser for CSV files whose headers differ from file to file? I will not know the header of the CSV file up front, so I cannot create the format with
CSVFormat csvFileFormat = CSVFormat.DEFAULT.withHeader(FILE_HEADER_MAPPING);
I want to have a CSV parser for each possible CSV header. Please help me solve this scenario (a solution sketch follows the posted code below).
package dfi.fin.dcm.syn.loantrading.engine.source.impl;
import static dfi.fin.dcm.syn.loantrading.engine.task.impl.BackOfficeCSVHelper.AMOUNT;
import static dfi.fin.dcm.syn.loantrading.engine.task.impl.BackOfficeCSVHelper.FCN;
import static dfi.fin.dcm.syn.loantrading.engine.task.impl.BackOfficeCSVHelper.FEE_TYPE;
import static dfi.fin.dcm.syn.loantrading.engine.task.impl.BackOfficeCSVHelper.LINE_TYPE;
import static dfi.fin.dcm.syn.loantrading.engine.task.impl.BackOfficeCSVHelper.LINE_TYPE_VALUE_CARRY_EVT;
import static dfi.fin.dcm.syn.loantrading.engine.task.impl.BackOfficeCSVHelper.MARKIT_ID;
import static dfi.fin.dcm.syn.loantrading.engine.task.impl.BackOfficeCSVHelper.VALUE_DATE;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import com.csvreader.CsvReader.CatastrophicException;
import com.csvreader.CsvReader.FinalizedException;
import dfi.fin.dcm.syn.loantrading.engine.source.SourceException;
import dfi.fin.dcm.syn.loantrading.model.portfolio.Portfolio;
@Deprecated
public class CarryEventStreamSource extends AbstractInputStreamSource<CarryEventData> {
private static String [] headers = {LINE_TYPE,VALUE_DATE,MARKIT_ID,FEE_TYPE,AMOUNT};
private SimpleDateFormat dateFormat = null;
public CarryEventStreamSource(InputStream stream) {
super(stream);
dateFormat = new SimpleDateFormat("dd/MM/yy");
}
public CarryEventData readNextElementInternal() throws SourceException, IOException, CatastrophicException, FinalizedException {
//skipping all events which are not Carry
boolean loop = true;
while (loop) {
// skipping all events which are not Carry
if(getReader().readRecord() && !getReader().get(LINE_TYPE).trim().equals(LINE_TYPE_VALUE_CARRY_EVT)) {
loop = true;
} else {
loop = false;
}
}
//EOF?
if (getReader().get(LINE_TYPE).trim().equals(LINE_TYPE_VALUE_CARRY_EVT)) {
CarryEventData toReturn = new CarryEventData();
toReturn.setComputationDate(Calendar.getInstance().getTime());
try {
toReturn.setValueDate(getDateFormat().parse(getReader().get(VALUE_DATE).trim()));
} catch (ParseException e) {
throw new SourceException(e);
}
if (!getPortfolio().getMtmSourceType().equals(Portfolio.MTM_SOURCE_TYPE_NONE)) {
if (getReader().get(MARKIT_ID).trim().isEmpty()) {
throw new SourceException("Back Office file invalid data format: the markit id is missing on line "+getReader().getCurrentRecord());
}
toReturn.setTrancheMarkitId(getReader().get(MARKIT_ID).trim());
} else {
if (getReader().get(FCN)==null || "".equals(getReader().get(FCN).trim())) {
throw new SourceException("Back Office file invalid data format: missing loan tranche id on line "+getReader().getCurrentRecord());
}
toReturn.setTrancheMarkitId(getReader().get(FCN).trim());
}
if (getReader().get(FEE_TYPE).equals("")) {
toReturn.setFeeType(null);
} else {
toReturn.setFeeType(getReader().get(FEE_TYPE).trim());
}
if (getReader().get(AMOUNT)==null) {
throw new SourceException("Back Office file invalid data format: missing amount on line "+getReader().getCurrentRecord());
}
try {
toReturn.setAmount(new BigDecimal(getReader().get(AMOUNT)));
} catch (NumberFormatException ex) {
throw new SourceException(ex,"Back Office file invalid data format: invalid amount on line "+getReader().getCurrentRecord());
}
return toReturn;
}
// no carry found, null is returned
return null;
}
public SimpleDateFormat getDateFormat() {
return dateFormat;
}
public void setDateFormat(SimpleDateFormat dateFormat) {
this.dateFormat = dateFormat;
}
@Override
public char getDelimiter() {
return ',';
}
@Override
public List<String> getHeaderSet() {
return Arrays.asList(headers);
}
@Override
public String getName() {
return "File import";
}
}

package dfi.fin.dcm.syn.loantrading.engine.source.impl;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import com.csvreader.CsvReader.CatastrophicException;
import com.csvreader.CsvReader.FinalizedException;
import dfi.fin.dcm.syn.loantrading.engine.source.SourceException;
import dfi.fin.dcm.syn.loantrading.model.common.LTCurrency;
import dfi.fin.dcm.syn.loantrading.model.engine.event.CurrencyEvent;
public class SpotForexRateStreamSource extends AbstractInputStreamSource<CurrencyEvent> {
private SimpleDateFormat dateFormat;
private static String [] headers = {"CURRENCY","DATE","MID"};
public SpotForexRateStreamSource(InputStream stream) {
super(stream);
dateFormat = new SimpleDateFormat("dd/MM/yy");
}
@Override
public CurrencyEvent readNextElementInternal() throws SourceException, IOException, FinalizedException, CatastrophicException {
//skipping all events which are not Trade
if (getReader().readRecord()) {
CurrencyEvent event = new CurrencyEvent();
//retrieving the currency
LTCurrency currency = getCurrencyDAO().getLTCurrencyByISOCode(getReader().get("CURRENCY"));
event.setCurrency(currency);
try {
event.setDate(getDateFormat().parse(getReader().get("DATE")));
} catch (ParseException e) {
throw new SourceException(e, "Parse error while reading currency event date");
}
event.setExchangeRate(new BigDecimal(getReader().get("MID")));
event.setComputationDate(Calendar.getInstance().getTime());
return event;
}
return null;
}
@Override
public char getDelimiter() {
return ';';
}
public SimpleDateFormat getDateFormat() {
return dateFormat;
}
public void setDateFormat(SimpleDateFormat dateFormat) {
this.dateFormat = dateFormat;
}
@Override
public List<String> getHeaderSet() {
return Arrays.asList(headers);
}
@Override
public String getName() {
return "CSV File";
}
}
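For the dynamic-header question: Commons CSV can take the header from the first record of each file instead of a fixed array. A minimal sketch (assuming commons-csv 1.3+; the file name is illustrative):

import java.io.FileReader;
import java.util.Map;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class DynamicHeaderCsv {
    public static void main(String[] args) throws Exception {
        // Take the header names from the file's first row instead of a constant.
        CSVFormat format = CSVFormat.DEFAULT.withFirstRecordAsHeader();
        try (CSVParser parser = new CSVParser(new FileReader("file.csv"), format)) {
            Map<String, Integer> headers = parser.getHeaderMap(); // discovered at runtime
            for (CSVRecord record : parser) {
                for (String header : headers.keySet()) {
                    System.out.println(header + " = " + record.get(header));
                }
            }
        }
    }
}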

I am trying to use the Jackson API to parse the JSON input stream twice and I get a java.io.EOFException

My ParserCheck.java:
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.io.IOUtils;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;

public class ParserCheck {
    public static void main(String[] args) {
        ObjectMapper objectMapper = new ObjectMapper();
        String responseJson = "{\"status\":\"200\",\"message\":\"Success\"}";
        InputStream streamResponse = IOUtils.toInputStream(responseJson);
        try {
            MyJsonResponse response = objectMapper.readValue(streamResponse, MyJsonResponse.class);
            System.out.println(response);
        } catch (JsonParseException | JsonMappingException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            MyJsonResponse response = objectMapper.readValue(streamResponse, MyJsonResponse.class);
            System.out.println(response);
        } catch (JsonParseException | JsonMappingException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                streamResponse.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
My JsonResponse.java:
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.annotate.JsonSerialize;

@JsonIgnoreProperties(ignoreUnknown = true)
@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
public class MyJsonResponse {
    @JsonProperty
    private Integer status;

    @JsonProperty
    private String message;

    public Integer getStatus() {
        return status;
    }

    public String getMessage() {
        return message;
    }

    @Override
    public String toString() {
        return "Json Response [Status =" + status + ", Message =" + message + " ]";
    }
}
I get the proper output the first time but the second time I get an exception.
This is the output:
Json Response [Status =200, Message =Success ]
java.io.EOFException: No content to map to Object due to end of input
at org.codehaus.jackson.map.ObjectMapper._initForReading(ObjectMapper.java:2775)
at org.codehaus.jackson.map.ObjectMapper._readMapAndClose(ObjectMapper.java:2718)
at org.codehaus.jackson.map.ObjectMapper.readValue(ObjectMapper.java:1909)
at service.provider.ParserCheck.main(ParserCheck.java:33)
What should be done in this case. I tried closing the stream after the first output but it still gives me the exception.
You're reusing a stream. The first call to
objectMapper.readValue(streamResponse, MyJsonResponse.class);
reads the stream to the end, so when you call it the second time the stream has no more content, e.g. streamResponse.read() returns -1.
Is there any reason you are using a stream? I tried:
MyJsonResponse response = objectMapper.readValue(responseJson, MyJsonResponse.class);
which works as expected.
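If a stream is required, one fix (a sketch) is to build a fresh InputStream for each read instead of reusing the exhausted one:

// Each readValue gets its own stream, so the second parse sees the full content.
MyJsonResponse first = objectMapper.readValue(IOUtils.toInputStream(responseJson), MyJsonResponse.class);
MyJsonResponse second = objectMapper.readValue(IOUtils.toInputStream(responseJson), MyJsonResponse.class);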

Kryo serialization of a class (task object) in Apache Spark returns null on deserialization

I am using the Java Spark API to write a test application. I am using a class which doesn't extend the Serializable interface, so to make the application work I am using the Kryo serializer to serialize the class. The problem I observed while debugging is that during deserialization the returned class object becomes null, which in turn throws a NullPointerException. It seems to be a closure problem where things are going wrong, but I'm not sure. Since I am new to this kind of serialization, I don't know where to start digging.
Here is the code I am testing :
package org.apache.spark.examples;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
/**
* Spark application to test the Serialization issue in spark
*/
public class Test {
static PrintWriter outputFileWriter;
static FileWriter file;
static JavaSparkContext ssc;
public static void main(String[] args) {
String inputFile = "/home/incubator-spark/examples/src/main/scala/org/apache/spark/examples/InputFile.txt";
String master = "local";
String jobName = "TestSerialization";
String sparkHome = "/home/test/Spark_Installation/spark-0.7.0";
String sparkJar = "/home/test/TestSerializationIssesInSpark/TestSparkSerIssueApp/target/TestSparkSerIssueApp-0.0.1-SNAPSHOT.jar";
SparkConf conf = new SparkConf();
conf.set("spark.closure.serializer","org.apache.spark.serializer.KryoSerializer");
conf.set("spark.kryo.registrator", "org.apache.spark.examples.MyRegistrator");
// create the Spark context
if(master.equals("local")){
ssc = new JavaSparkContext("local", jobName,conf);
//ssc = new JavaSparkContext("local", jobName);
} else {
ssc = new JavaSparkContext(master, jobName, sparkHome, sparkJar);
}
JavaRDD<String> testData = ssc.textFile(inputFile).cache();
final NotSerializableJavaClass notSerializableTestObject= new NotSerializableJavaClass("Hi ");
@SuppressWarnings({ "serial", "unchecked"})
JavaRDD<String> classificationResults = testData.map(
new Function<String, String>() {
@Override
public String call(String inputRecord) throws Exception {
if(!inputRecord.isEmpty()) {
//String[] pointDimensions = inputRecord.split(",");
String result = "";
try {
FileWriter file = new FileWriter("/home/test/TestSerializationIssesInSpark/results/test_result_" + (int) (Math.random() * 100));
PrintWriter outputFile = new PrintWriter(file);
InetAddress ip;
ip = InetAddress.getLocalHost();
outputFile.println("IP of the server: " + ip);
result = notSerializableTestObject.testMethod(inputRecord);
outputFile.println("Result: " + result);
outputFile.flush();
outputFile.close();
file.close();
} catch (UnknownHostException e) {
e.printStackTrace();
}
catch (IOException e1) {
e1.printStackTrace();
}
return result;
} else {
System.out.println("End of elements in the stream.");
String result = "End of elements in the input data";
return result;
}
}
}).cache();
long processedRecords = classificationResults.count();
ssc.stop();
System.out.println("sssssssssss"+processedRecords);
}
}
Here is the KryoRegistrator class
package org.apache.spark.examples;
import org.apache.spark.serializer.KryoRegistrator;
import com.esotericsoftware.kryo.Kryo;
public class MyRegistrator implements KryoRegistrator {
public void registerClasses(Kryo kryo) {
kryo.register(NotSerializableJavaClass.class);
}
}
Here is the class I am serializing :
package org.apache.spark.examples;
public class NotSerializableJavaClass {
public String testVariable;
public NotSerializableJavaClass(String testVariable) {
super();
this.testVariable = testVariable;
}
public String testMethod(String vartoAppend){
return this.testVariable + vartoAppend;
}
}
This is because spark.closure.serializer only supports the Java serializer; Kryo is not an option for serializing closures. See http://spark.apache.org/docs/latest/configuration.html for the documentation of spark.closure.serializer.
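Given that restriction, one workaround (a sketch, not the only option) is to avoid capturing the non-serializable object in the closure at all, e.g. by constructing it inside the function so the closure stays Java-serializable:

JavaRDD<String> classificationResults = testData.map(
    new Function<String, String>() {
        @Override
        public String call(String inputRecord) throws Exception {
            // Created per call, so it is never serialized as part of the closure.
            NotSerializableJavaClass helper = new NotSerializableJavaClass("Hi ");
            return helper.testMethod(inputRecord);
        }
    });

Alternatively, simply make NotSerializableJavaClass implement java.io.Serializable, since the closure serializer can then handle the captured object.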
