How to append to JSON with Java and Jackson

I'm trying a simple test where the code appends a few JSON entries; however, the file gets overwritten each time (the JSON file only has one entry in it after running). I know I need to somehow create an array in JSON using '[]', but how would I go about doing that? Also, is there a better way to do this? I've been searching around and every library seems clunky, with lots of code the user has to write. Thanks
public class REEEE {

    private static Staff createStaff() {
        Staff staff = new Staff();
        staff.setName("mkyong");
        staff.setAge(38);
        staff.setPosition(new String[] { "Founder", "CTO", "Writer" });
        Map<String, Double> salary = new HashMap() {
            {
                put("2010", 10000.69);
            }
        };
        staff.setSalary(salary);
        staff.setSkills(Arrays.asList("java", "python", "node", "kotlin"));
        return staff;
    }

    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        File file = new File("src//j.json");

        for (int i = 0; i < 4; i++) {
            Staff staff = createStaff();
            try {
                // Java objects to JSON file
                mapper.writeValue(file, staff);

                // Java objects to JSON string - compact-print
                String jsonString = mapper.writeValueAsString(staff);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

You can add each Staff to a List and then write the list to the file, as below:
List<Staff> staffList = new LinkedList<>();
for (int i = 0; i < 4; i++) {
    Staff staff = createStaff();
    staffList.add(staff);
}
mapper.writeValue(file, staffList);
Hope it helps.
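If the file may already contain entries from a previous run, a minimal sketch of one way to append across runs (assuming the file, when present, always holds a JSON array of Staff; requires imports for com.fasterxml.jackson.core.type.TypeReference and java.util.ArrayList) is to read the existing list back, add to it, and rewrite the file:

// Read the existing array (if any), append the new entry, and rewrite the whole file
List<Staff> existing = file.exists() && file.length() > 0
        ? mapper.readValue(file, new TypeReference<List<Staff>>() {})
        : new ArrayList<>();
existing.add(createStaff());
mapper.writeValue(file, existing);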

Jackson was implemented to parse and generate JSON payloads. Any extra logic, such as adding a new element to an array and writing it back to the file, you need to implement yourself. It should not be hard to do:
class JsonFileAppender {

    private final ObjectMapper jsonMapper;

    public JsonFileAppender() {
        this.jsonMapper = JsonMapper.builder().build();
    }

    public void appendToArray(File jsonFile, Object value) throws IOException {
        Objects.requireNonNull(jsonFile);
        Objects.requireNonNull(value);
        if (jsonFile.isDirectory()) {
            throw new IllegalArgumentException("File can not be a directory!");
        }
        JsonNode node = readArrayOrCreateNew(jsonFile);
        if (node.isArray()) {
            ArrayNode array = (ArrayNode) node;
            array.addPOJO(value);
        } else {
            ArrayNode rootArray = jsonMapper.createArrayNode();
            rootArray.add(node);
            rootArray.addPOJO(value);
            node = rootArray;
        }
        jsonMapper.writeValue(jsonFile, node);
    }

    private JsonNode readArrayOrCreateNew(File jsonFile) throws IOException {
        if (jsonFile.exists() && jsonFile.length() > 0) {
            return jsonMapper.readTree(jsonFile);
        }
        return jsonMapper.createArrayNode();
    }
}
Example usage with a few use cases:
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class JsonPathApp {

    public static void main(String[] args) throws Exception {
        Path jsonTmpFile = Files.createTempFile("json", "array");

        JsonFileAppender jfa = new JsonFileAppender();

        // Add POJO
        jfa.appendToArray(jsonTmpFile.toFile(), createStaff());
        printContent(jsonTmpFile); // 1

        // Add primitive
        jfa.appendToArray(jsonTmpFile.toFile(), "Jackson");
        printContent(jsonTmpFile); // 2

        // Add another array
        jfa.appendToArray(jsonTmpFile.toFile(), Arrays.asList("Version: ", 2, 10, 0));
        printContent(jsonTmpFile); // 3

        // Add another object
        jfa.appendToArray(jsonTmpFile.toFile(), Collections.singletonMap("simple", "object"));
        printContent(jsonTmpFile); // 4
    }

    private static Staff createStaff() {
        Staff staff = new Staff();
        staff.setName("mkyong");
        staff.setAge(38);
        staff.setPosition(new String[]{"Founder", "CTO", "Writer"});
        Map<String, Double> salary = new HashMap<>();
        salary.put("2010", 10000.69);
        staff.setSalary(salary);
        staff.setSkills(Arrays.asList("java", "python", "node", "kotlin"));
        return staff;
    }

    private static void printContent(Path path) throws IOException {
        List<String> lines = Files.readAllLines(path);
        System.out.println(String.join("", lines));
    }
}
The above code prints four lines (numbered here for reference):
1
[{"name":"mkyong","age":38,"position":["Founder","CTO","Writer"],"salary":{"2010":10000.69},"skills":["java","python","node","kotlin"]}]
2
[{"name":"mkyong","age":38,"position":["Founder","CTO","Writer"],"salary":{"2010":10000.69},"skills":["java","python","node","kotlin"]},"Jackson"]
3
[{"name":"mkyong","age":38,"position":["Founder","CTO","Writer"],"salary":{"2010":10000.69},"skills":["java","python","node","kotlin"]},"Jackson",["Version: ",2,10,0]]
4
[{"name":"mkyong","age":38,"position":["Founder","CTO","Writer"],"salary":{"2010":10000.69},"skills":["java","python","node","kotlin"]},"Jackson",["Version: ",2,10,0],{"simple":"object"}]

Related

List returning duplicate values while iterating in Java

The program below keeps emitting identical values in every iteration, even though the parsing from the JsonNode is correct:
package com.test.testing;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.kafka.streams.KeyValue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class LoopCheck {

    public static void main(String args[]) throws IOException {
        String jsonValue = "{\"id\":\"247\",\"effective_date\":\"2020-06-21T23:30:00Z\",\"code_portions\":[{\"portion_code\":\"123\",\"portion_name\":\"Test1\"},{\"portion_code\":\"1234\",\"portion_name\":\"Test2\"}]}";
        ObjectMapper objectNode = new ObjectMapper();
        JsonNode value = objectNode.readTree(jsonValue);
        jsonLoopFunc("abc", value);
    }

    public static void jsonLoopFunc(String key, JsonNode value) throws IOException {
        List<KeyValue<String, JsonNode>> recordsList = new ArrayList<>();
        ObjectMapper objectNode = new ObjectMapper();
        JsonNode portionValue = objectNode.readValue(String.valueOf(value), JsonNode.class);
        ObjectNode recordNode = new ObjectMapper().createObjectNode();
        JsonNode examPortionNode = value.get("code_portions");
        ArrayNode arrayNode = (ArrayNode) examPortionNode;

        for (int iterate = 0; iterate <= arrayNode.size() - 1; iterate++) {
            JsonNode insideArr;
            insideArr = (JsonNode) arrayNode.get(iterate);
            recordNode.put("ID", portionValue.get("id").isNull() ? null : value.get("id").asInt());
            recordNode.put("eventDateTime", portionValue.get("effective_date").isNull() ? null : value.get("effective_date").asText());
            String portionCode = insideArr.get("portion_code").asText();
            String portionName = insideArr.get("portion_name").asText();
            recordNode.put("PortionIndex", (iterate + 1));
            recordNode.put("PortionCode", portionCode);
            recordNode.put("PortionName", portionName);
            recordNode.put("systemModifiedDate", String.valueOf(java.time.LocalDateTime.now()));
            //System.out.println("Output from ObjectNode = " + recordNode);
            recordsList.add(KeyValue.pair(key, recordNode));
        }
        recordsList.stream().forEach((records) -> System.out.println(records));
    }
}
Program output:
KeyValue(abc, {"ID":247,"eventDateTime":"2020-06-21T23:30:00Z","PortionIndex":2,"PortionCode":"1234","PortionName":"Test2","systemModifiedDate":"2021-06-12T13:57:47.551978800"})
KeyValue(abc, {"ID":247,"eventDateTime":"2020-06-21T23:30:00Z","PortionIndex":2,"PortionCode":"1234","PortionName":"Test2","systemModifiedDate":"2021-06-12T13:57:47.551978800"})
Expected output:
KeyValue(abc, {"ID":247,"eventDateTime":"2020-06-21T23:30:00Z","PortionIndex":1,"PortionCode":"123","PortionName":"Test1","systemModifiedDate":"2021-06-12T13:57:47.536357600"})
KeyValue(abc, {"ID":247,"eventDateTime":"2020-06-21T23:30:00Z","PortionIndex":2,"PortionCode":"1234","PortionName":"Test2","systemModifiedDate":"2021-06-12T13:57:47.551978800"})
You are mutating the same recordNode on each pass of the loop. You need to create a new one inside your loop.
Just move this piece of code inside the loop:
ObjectNode recordNode = new ObjectMapper().createObjectNode();
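For reference, a minimal sketch of the loop with the node creation moved inside (reusing the objectNode mapper that already exists in the method; the null checks from the original are omitted for brevity):

for (int iterate = 0; iterate < arrayNode.size(); iterate++) {
    // Create a fresh node for every element so earlier entries are not overwritten
    ObjectNode recordNode = objectNode.createObjectNode();
    JsonNode insideArr = arrayNode.get(iterate);
    recordNode.put("ID", value.get("id").asInt());
    recordNode.put("eventDateTime", value.get("effective_date").asText());
    recordNode.put("PortionIndex", iterate + 1);
    recordNode.put("PortionCode", insideArr.get("portion_code").asText());
    recordNode.put("PortionName", insideArr.get("portion_name").asText());
    recordNode.put("systemModifiedDate", String.valueOf(java.time.LocalDateTime.now()));
    recordsList.add(KeyValue.pair(key, recordNode));
}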

I'm trying to read a text file and store it in an arraylist of objects

I'm trying to read a text file and store it in an ArrayList of objects, but I keep getting an error saying I cannot convert a String to an Item, which is the element type of the ArrayList I am using. I have tried various solutions but am not quite sure how it is supposed to be done. I am new to coding and have this assignment due soon. Anything helps!
private void loadFile(String FileName)
{
    Scanner in;
    Item line;
    try
    {
        in = new Scanner(new File(FileName));
        while (in.hasNext())
        {
            line = in.nextLine();
            MyStore.add(line);
        }
        in.close();
    }
    catch (IOException e)
    {
        System.out.println("FILE NOT FOUND.");
    }
}
My apologies for not adding the Item class:
public class Item
{
    private int myId;
    private int myInv;

    // default constructor
    public Item()
    {
        myId = 0;
        myInv = 0;
    }

    // "normal" constructor
    public Item(int id, int inv)
    {
        myId = id;
        myInv = inv;
    }

    // copy constructor
    public Item(Item OtherItem)
    {
        myId = OtherItem.getId();
        myInv = OtherItem.getInv();
    }

    public int getId()
    {
        return myId;
    }

    public int getInv()
    {
        return myInv;
    }

    public int compareTo(Item Other)
    {
        int compare = 0;
        if (myId > Other.getId())
        {
            compare = 1;
        }
        else if (myId < Other.getId())
        {
            compare = -1;
        }
        return compare;
    }

    public boolean equals(Item Other)
    {
        boolean equal = false;
        if (myId == Other.getId())
        {
            equal = true;
        }
        return equal;
    }

    public String toString()
    {
        String Result;
        Result = String.format("%8d%8d", myId, myInv);
        return Result;
    }
}
This is the creation of my ArrayList:
private ArrayList MyStore = new ArrayList();
Here is a sample of my text file.
3679 87
196 60
12490 12
18618 14
2370 65
One way is to read the file line by line with a BufferedReader:
package com.mycompany.rosmery;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * @author Sem-6-INGENIERIAINDU
 */
public class aaa {

    public static void main(String arg[]) throws FileNotFoundException, IOException {
        BufferedReader files = new BufferedReader(new FileReader(new File("")));
        List<String> dto = new ArrayList<>();
        String line;
        while ((line = files.readLine()) != null) {
            dto.add(line);
            // apply the logic for this data here
        }
    }
}
in.nextLine() returns a String, so you cannot assign its result to a variable of type Item.
Your code could be corrected as follows:
List<String> myStore = new ArrayList<String>();

private void loadFile(String FileName)
{
    Scanner in;
    try
    {
        in = new Scanner(new File(FileName));
        while (in.hasNext())
        {
            myStore.add(in.nextLine());
        }
        in.close();
    }
    catch (IOException e)
    {
        System.out.println("FILE NOT FOUND.");
    }
}
If you want to have a list of Item objects after reading the file, then you need to provide the logic that converts a given line into an instance of Item.
Let's say your file content is in the following format:
id1,inv1
id2,inv2
...
Then you can use the type Item as follows:
List<Item> myStore = new ArrayList<Item>();

private void loadFile(String FileName)
{
    Scanner in;
    String[] line;
    try
    {
        in = new Scanner(new File(FileName));
        while (in.hasNext())
        {
            line = in.nextLine().split(",");
            // Item expects ints, so the String tokens must be parsed
            myStore.add(new Item(Integer.parseInt(line[0]), Integer.parseInt(line[1])));
        }
        in.close();
    }
    catch (IOException e)
    {
        System.out.println("FILE NOT FOUND.");
    }
}
One possible solution (assuming the data in the file lines is comma-separated), using streams:
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class Main {

    public static void main(String[] args) throws IOException {
        List<Item> items = loadFile("myfile.txt");
        System.out.println(items);
    }

    private static List<Item> loadFile(String fileName) throws IOException {
        try (Stream<String> stream = Files.lines(Paths.get(fileName))) {
            return stream
                    .map(s -> Stream.of(s.split(",")).mapToInt(Integer::parseInt).toArray())
                    .map(i -> new Item(i[0], i[1]))
                    .collect(Collectors.toList());
        }
    }
}
Or with a for-each loop:
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class Main {

    public static void main(String[] args) throws IOException {
        List<Item> items = new ArrayList<>();
        for (String line : loadFile("myfile.txt")) {
            String[] data = line.split(",");
            int id = Integer.parseInt(data[0]);
            int inv = Integer.parseInt(data[1]);
            items.add(new Item(id, inv));
        }
        System.out.println(items);
    }

    private static List<String> loadFile(String fileName) throws IOException {
        try (Stream<String> stream = Files.lines(Paths.get(fileName))) {
            return stream.collect(Collectors.toList());
        }
    }
}
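One caveat not covered by either answer: the sample file in the question ("3679 87", and so on) is separated by whitespace rather than commas. If that is the actual format, only the split needs to change; a small sketch, shown here for the for-each version:

String[] data = line.trim().split("\\s+"); // split on runs of whitespace instead of ","
int id = Integer.parseInt(data[0]);
int inv = Integer.parseInt(data[1]);
items.add(new Item(id, inv));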

How to convert the free text into Json String Array in Java using GSON library? [duplicate]

This question already has an answer here: directly convert CSV file to JSON file using the Jackson library (1 answer). Closed 7 years ago.
I have free text available in a file.
I am stuck while converting it into a JSON string array.
The column names are variable, and there can be any number of columns.
email_from,email_to,DATE_CHANGED
samwilliams@gmail.com, mike_haley@gmail.com, 1447666867
smithpaul@gmail.com, angierussell@gmail.com, 1447668867
The first line contains the headers, and the rest of the lines are their values.
So every line contains the same number of values, one per column.
Columns are comma-separated.
The JSON string response should look like this:
{
    "data": [
        {
            "email_from": "samwilliams@gmail.com",
            "email_to": "mike_haley@gmail.com",
            "DATE_CHANGED": "1447666867"
        },
        {
            "email_from": "smithpaul@gmail.com",
            "email_to": "angierussell@gmail.com",
            "DATE_CHANGED": "1447668867"
        }
    ]
}
The following code opens a file of comma-delimited strings, uses a while loop to construct a JsonObject for each line, keeps adding them to a JsonArray, and finally prints it (please add your own validations; you could also move most of the code out of the try block if you wish). It addresses the need to support any number of columns in the file.
package gjson;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;

public class GJSONTest {

    public static void main(String[] args) {
        // create an array called datasets
        JsonArray datasets = new JsonArray();
        File file = new File("C:\\test_stackoverflow\\list.txt");

        try (BufferedReader br = new BufferedReader(new FileReader(file))) {
            String line;
            boolean flag = true;
            List<String> columns = null;
            while ((line = br.readLine()) != null) {
                if (flag) {
                    flag = false;
                    // process header
                    columns = Arrays.asList(line.split(","));
                } else {
                    // to store the object temporarily
                    JsonObject obj = new JsonObject();
                    List<String> chunks = Arrays.asList(line.split(","));
                    for (int i = 0; i < columns.size(); i++) {
                        obj.addProperty(columns.get(i), chunks.get(i));
                    }
                    datasets.add(obj);
                }
            }
        } catch (FileNotFoundException fnfe) {
            System.out.println("File not found.");
        } catch (IOException io) {
            System.out.println("Cannot read file.");
        }

        Gson gson = new GsonBuilder()
                .setPrettyPrinting()
                .serializeNulls()
                .setFieldNamingPolicy(FieldNamingPolicy.UPPER_CAMEL_CASE)
                .create();
        System.out.println(gson.toJson(datasets));
    }
}
(The original answer included screenshots of the output with three columns, then with an extra column added to the text file, along with the sample .txt input file.)
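If you also need the exact {"data": [...]} wrapper shown in the question, one small addition (not part of the original answer) is to nest the array in a root JsonObject before printing:

JsonObject root = new JsonObject();
root.add("data", datasets);            // wrap the array under the "data" key
System.out.println(gson.toJson(root));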
Another option, when the whole content is already available as a single String:
private String getParsedData(String data) {
    String[] lines = data.split("\\r?\\n");
    List<Map<String, Object>> dataList = new ArrayList<>();
    int colCount = 0;
    if (lines.length > 1) {
        String keyLine = lines[0];
        String[] keys = keyLine.split(",");
        for (int i = 1; i < lines.length; i++) {
            colCount = 0;
            Map<String, Object> rawObj = new HashMap<String, Object>();
            try {
                String[] values = lines[i].split(",");
                for (String value : values) {
                    rawObj.put(keys[colCount], value);
                    colCount++;
                }
            } catch (Exception e) {
            }
            dataList.add(rawObj);
        }
    }
    Map<String, Object> rawObj = new HashMap<String, Object>();
    rawObj.put("data", dataList);
    Gson gson = new Gson();
    String res = gson.toJson(rawObj);
    return res;
}
Called with the input as a single string:
String data = "email_from,email_to,DATE_CHANGED\r\nsamwilliams@gmail.com, mike_haley@gmail.com, 1447666867\r\nsmithpaul@gmail.com, angierussell@gmail.com, 1447668867";
But I am not sure whether this code is efficient or not.
Try this code:
public class ContactObject {

    private String emailFrom;
    private String emailTo;
    private String dateChanged;

    public ContactObject(String emailFrom, String emailTo, String dateChanged) {
        this.emailFrom = emailFrom;
        this.emailTo = emailTo;
        this.dateChanged = dateChanged;
    }

    @Override
    public String toString() {
        return "{email_from:" + emailFrom + ", email_to:" + emailTo + ", DATE_CHANGED:" + dateChanged + "}";
    }
}

public class ContactJSON {

    private List<ContactObject> data;

    public ContactJSON(List<ContactObject> contactList) {
        this.data = contactList;
    }
}
Then in your main() method you can make use of these classes:
public static void main(String[] args) {
    List<ContactObject> contactList = new ArrayList<ContactObject>();
    ContactObject obj1 = new ContactObject("samwilliams@gmail.com", "mike_haley@gmail.com", "1447666867");
    ContactObject obj2 = new ContactObject("smithpaul@gmail.com", "angierussell@gmail.com", "1447668867");
    contactList.add(obj1);
    contactList.add(obj2);

    Gson gson = new Gson();
    String json = gson.toJson(new ContactJSON(contactList));
    System.out.println(json);
}
Output:
{"data":[{"emailFrom":"samwilliams#gmail.com","emailTo":"mike_haley#gmail.com","dateChanged":"1447666867"},{"emailFrom":"smithpaul#gmail.com","emailTo":"angierussell#gmail.com","dateChanged":"1447668867"}]}

Java Hashmap from csv

I'm trying to use a HashMap in a class so that, from other classes, I can look up product descriptions given the product code.
With the setter (populate) everything seems to work fine, but when I try to use a getter (getdesc) I get an error.
The CSV file I'm using has only 2 fields (cod, des). Later, I plan to populate the HashMap from a JDBC SQL Server source.
I'm probably using the wrong syntax. I'd appreciate it if anyone can help.
This is my current class code:
package bookRecommender;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;

public class ProdutoDescricao
{
    public static void main(String[] args) {}

    @SuppressWarnings({ "resource", "unused" })
    public static void populate(String[] args) throws IOException {
        Map<String, ArrayList<String>> produtos = null;
        try {
            String csvFile = "Data/produto_descricao.csv";
            BufferedReader br = new BufferedReader(new FileReader(csvFile));
            String line = "";
            StringTokenizer st = null;
            produtos = new HashMap<String, ArrayList<String>>();

            int lineNumber = 0;
            int tokenNumber = 0;

            while ((line = br.readLine()) != null) {
                lineNumber++;
                st = new StringTokenizer(line, ",");
                while (st.hasMoreTokens()) {
                    tokenNumber++;
                    String token_lhs = st.nextToken();
                    String token_rhs = st.nextToken();
                    ArrayList<String> arrVal = produtos.get(token_lhs);
                    if (arrVal == null) {
                        arrVal = new ArrayList<String>();
                        produtos.put(token_lhs, arrVal);
                    }
                    arrVal.add(token_rhs);
                }
            }
            System.out.println("Final Hashmap is : " + produtos);
        } catch (Exception e) {
            System.err.println("CSV file cannot be read : " + e);
        }
    }

    public String getdesc(long cod)
    {
        return produto.get(cod);
        // Here is the syntax error
    }
}
After produtos = new HashMap<String, ArrayList<String>>(), the map has no values; it is empty.
The line ArrayList<String> arrVal = produtos.get(token_lhs); has an issue: you have to add values to your map first, and only then get them back.
You have a map with String keys and ArrayList<String> values, but you are trying to retrieve a value with a long key and a String return type.
public ArrayList<String> getdesc(String cod)
{
    return produtos.get(cod);
}
Also declare the field 'produtos':
private static Map<String, ArrayList<String>> produtos;
Full class code: http://pastebin.com/QLZryqT8
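Putting the suggestions together, a rough sketch of how the class could look (field and method names kept from the question; the CSV parsing is simplified and error handling omitted for brevity):

public class ProdutoDescricao
{
    // shared map: product code -> list of descriptions
    private static Map<String, ArrayList<String>> produtos = new HashMap<>();

    public static void populate(String csvFile) throws IOException {
        try (BufferedReader br = new BufferedReader(new FileReader(csvFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                String[] parts = line.split(",");
                // create the list for a new code on first sight, then append the description
                produtos.computeIfAbsent(parts[0], k -> new ArrayList<>()).add(parts[1]);
            }
        }
    }

    public static ArrayList<String> getdesc(String cod) {
        return produtos.get(cod);
    }
}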

kryo serializing of class (task object) in apache spark returns null while de-serialization

I am using the Java Spark API to write a test application. I am using a class which doesn't extend the Serializable interface, so to make the application work I am using the Kryo serializer to serialize the class. But the problem I observed while debugging is that during de-serialization the returned class object becomes null, which in turn throws a NullPointerException. It seems to be a closure problem where things are going wrong, but I'm not sure. Since I am new to this kind of serialization, I don't know where to start digging.
Here is the code I am testing:
package org.apache.spark.examples;

import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

/**
 * Spark application to test the serialization issue in Spark
 */
public class Test {

    static PrintWriter outputFileWriter;
    static FileWriter file;
    static JavaSparkContext ssc;

    public static void main(String[] args) {
        String inputFile = "/home/incubator-spark/examples/src/main/scala/org/apache/spark/examples/InputFile.txt";
        String master = "local";
        String jobName = "TestSerialization";
        String sparkHome = "/home/test/Spark_Installation/spark-0.7.0";
        String sparkJar = "/home/test/TestSerializationIssesInSpark/TestSparkSerIssueApp/target/TestSparkSerIssueApp-0.0.1-SNAPSHOT.jar";

        SparkConf conf = new SparkConf();
        conf.set("spark.closure.serializer", "org.apache.spark.serializer.KryoSerializer");
        conf.set("spark.kryo.registrator", "org.apache.spark.examples.MyRegistrator");

        // create the Spark context
        if (master.equals("local")) {
            ssc = new JavaSparkContext("local", jobName, conf);
            //ssc = new JavaSparkContext("local", jobName);
        } else {
            ssc = new JavaSparkContext(master, jobName, sparkHome, sparkJar);
        }

        JavaRDD<String> testData = ssc.textFile(inputFile).cache();
        final NotSerializableJavaClass notSerializableTestObject = new NotSerializableJavaClass("Hi ");

        @SuppressWarnings({ "serial", "unchecked" })
        JavaRDD<String> classificationResults = testData.map(
            new Function<String, String>() {
                @Override
                public String call(String inputRecord) throws Exception {
                    if (!inputRecord.isEmpty()) {
                        //String[] pointDimensions = inputRecord.split(",");
                        String result = "";
                        try {
                            FileWriter file = new FileWriter("/home/test/TestSerializationIssesInSpark/results/test_result_" + (int) (Math.random() * 100));
                            PrintWriter outputFile = new PrintWriter(file);
                            InetAddress ip;
                            ip = InetAddress.getLocalHost();
                            outputFile.println("IP of the server: " + ip);
                            result = notSerializableTestObject.testMethod(inputRecord);
                            outputFile.println("Result: " + result);
                            outputFile.flush();
                            outputFile.close();
                            file.close();
                        } catch (UnknownHostException e) {
                            e.printStackTrace();
                        } catch (IOException e1) {
                            e1.printStackTrace();
                        }
                        return result;
                    } else {
                        System.out.println("End of elements in the stream.");
                        String result = "End of elements in the input data";
                        return result;
                    }
                }
            }).cache();

        long processedRecords = classificationResults.count();
        ssc.stop();
        System.out.println("sssssssssss" + processedRecords);
    }
}
Here is the KryoRegistrator class
package org.apache.spark.examples;

import org.apache.spark.serializer.KryoRegistrator;
import com.esotericsoftware.kryo.Kryo;

public class MyRegistrator implements KryoRegistrator {
    public void registerClasses(Kryo kryo) {
        kryo.register(NotSerializableJavaClass.class);
    }
}
Here is the class I am serializing:
package org.apache.spark.examples;

public class NotSerializableJavaClass {

    public String testVariable;

    public NotSerializableJavaClass(String testVariable) {
        super();
        this.testVariable = testVariable;
    }

    public String testMethod(String vartoAppend) {
        return this.testVariable + vartoAppend;
    }
}
This is because spark.closure.serializer only supports the Java serializer. See http://spark.apache.org/docs/latest/configuration.html about spark.closure.serializer
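In practice that means leaving spark.closure.serializer alone and, if you want Kryo at all, using it only for data serialization via spark.serializer. A hedged sketch of the configuration:

SparkConf conf = new SparkConf();
// Leave spark.closure.serializer at its default (Java serialization for closures).
// Kryo can still be used for the data Spark shuffles and caches:
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
conf.set("spark.kryo.registrator", "org.apache.spark.examples.MyRegistrator");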
