I came across this question and answer, showing how to push data from DynamoDB to Elasticsearch for full-text search indexing. Our application, however, is not making use of Lambdas. Instead, we're using Apache Camel to capture DynamoDB Streams events and want to push the records to Elasticsearch from there.
Since we are using AWS SDK v2, we're not capturing a DynamodbEvent class or corresponding DynamodbStreamRecord record class containing the DynamoDB record. Instead, we are receiving a software.amazon.awssdk.services.dynamodb.model.Record object. Given that, how can we serialize and subsequently index this data in Elasticsearch? In the other question referenced, the record is converted to a JSON string and then sent over to Elasticsearch. Is there a way to do this with the v2 Record class? The ItemUtils class mentioned in the answer no longer exists, so I was unaware of another way to serialize it.
Any help you can give is greatly appreciated!!
Similar to the example you provided, you can try something like the following:
/**
 * Mirrors a single DynamoDB Streams record into Elasticsearch: INSERT and
 * MODIFY events index/update the new image, REMOVE events delete the document.
 *
 * @param record   the AWS SDK v2 stream record ({@code software.amazon.awssdk.services.dynamodb.model.Record})
 * @param index    target Elasticsearch index name
 * @param type     target Elasticsearch mapping type
 * @param esClient high-level REST client used to talk to Elasticsearch
 * @throws IllegalArgumentException if an INSERT/MODIFY record carries no new image
 * @throws Exception if the Elasticsearch call fails
 */
public void processRecord(Record record, String index, String type, RestHighLevelClient esClient) throws Exception {
    // Get operation
    final OperationType operationType = record.eventName();
    // Obtain a reference to the actual DynamoDB stream record
    final StreamRecord streamRecord = record.dynamodb();
    // Get ID. Assumes a single numeric attribute as partition key.
    final Map<String, AttributeValue> keys = streamRecord.keys();
    final String recordId = keys.get("ID").n();
    // Braces around each case are required: the original declared
    // newImage/jsonObject in two cases of the same switch scope, which
    // does not compile.
    switch (operationType) {
        case INSERT: {
            if (!streamRecord.hasNewImage()) {
                throw new IllegalArgumentException("No new image when inserting");
            }
            // Where toJson is defined here https://github.com/aaronanderson/aws-java-sdk-v2-utils/blob/master/src/main/java/DynamoDBUtil.java
            // and included below
            JsonObject jsonObject = toJson(streamRecord.newImage());
            IndexRequest indexRequest = new IndexRequest(index, type, recordId);
            indexRequest.source(jsonObject.toString(), XContentType.JSON);
            IndexResponse indexResponse = esClient.index(indexRequest, RequestOptions.DEFAULT);
            System.out.println("New content successfully indexed: " + indexResponse);
            break;
        }
        case MODIFY: {
            if (!streamRecord.hasNewImage()) {
                throw new IllegalArgumentException("No new image when updating");
            }
            JsonObject jsonObject = toJson(streamRecord.newImage());
            UpdateRequest updateRequest = new UpdateRequest(index, type, recordId);
            // Fixed: the original called request.doc(...) on an undeclared variable.
            updateRequest.doc(jsonObject.toString(), XContentType.JSON);
            UpdateResponse updateResponse = esClient.update(updateRequest, RequestOptions.DEFAULT);
            System.out.println("Content successfully updated: " + updateResponse);
            break;
        }
        case REMOVE: {
            DeleteRequest deleteRequest = new DeleteRequest(index, type, recordId);
            DeleteResponse deleteResponse = esClient.delete(deleteRequest, RequestOptions.DEFAULT);
            System.out.println("Successfully removed: " + deleteResponse);
            break;
        }
        default:
            // Fixed typos: opetationType -> operationType, "supportd" -> "supported".
            throw new UnsupportedOperationException("Operation type " + operationType + " not supported");
    }
}
The toJson method is defined is this class: https://github.com/aaronanderson/aws-java-sdk-v2-utils/blob/master/src/main/java/DynamoDBUtil.java
The source code is reproduced here:
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
import javax.json.JsonNumber;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonString;
import javax.json.JsonStructure;
import javax.json.JsonValue;
import software.amazon.awssdk.core.SdkBytes;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
/** This is a utility for converting DynamoDB AttributeValues to and from Java JSON-P objects */
/**
 * Utility for converting DynamoDB {@link AttributeValue}s (AWS SDK v2) to and
 * from Java JSON-P ({@code javax.json}) structures, plus optional Deflate
 * compression of a JSON payload into a binary attribute.
 */
public class DynamoDBUtil {

    /** Adds {@code items} under {@code key} as a JSON array; empty lists are skipped entirely. */
    public static void addList(String key, JsonObjectBuilder objectBuilder, List<JsonObject> items) {
        if (!items.isEmpty()) {
            JsonArrayBuilder builder = Json.createArrayBuilder();
            items.forEach(i -> builder.add(i));
            objectBuilder.add(key, builder.build());
        }
    }

    /** Converts a list of attribute values to a JSON array, or null if the input is null. */
    public static JsonArray toJson(List<AttributeValue> attributeValues) {
        if (attributeValues == null) {
            return null;
        }
        JsonArrayBuilder valueBuilder = Json.createArrayBuilder();
        for (AttributeValue a : attributeValues) {
            add(toJson(a), valueBuilder);
        }
        return valueBuilder.build();
    }

    /** Converts an attribute map (e.g. a stream record image) to a JSON object, or null if the input is null. */
    public static JsonObject toJson(Map<String, AttributeValue> attributeValues) {
        if (attributeValues == null) {
            return null;
        }
        JsonObjectBuilder valueBuilder = Json.createObjectBuilder();
        for (Map.Entry<String, AttributeValue> a : attributeValues.entrySet()) {
            add(a.getKey(), toJson(a.getValue()), valueBuilder);
        }
        return valueBuilder.build();
    }

    /** Adds a converted value to an object builder, dispatching on its runtime type. */
    public static void add(String key, Object value, JsonObjectBuilder object) {
        if (value instanceof JsonValue) {
            object.add(key, (JsonValue) value);
            // with json-p 1.0 can't create JsonString or JsonNumber so simply setting JsonValue not an option.
        } else if (value instanceof String) {
            object.add(key, (String) value);
        } else if (value instanceof BigDecimal) {
            object.add(key, (BigDecimal) value);
        } else if (value instanceof Boolean) {
            object.add(key, (Boolean) value);
        } else if (value == null || value.equals(JsonValue.NULL)) {
            object.addNull(key);
        }
    }

    /** Adds a converted value to an array builder, dispatching on its runtime type. */
    public static void add(Object value, JsonArrayBuilder array) {
        if (value instanceof JsonValue) {
            array.add((JsonValue) value);
        } else if (value instanceof String) {
            array.add((String) value);
        } else if (value instanceof BigDecimal) {
            array.add((BigDecimal) value);
        } else if (value instanceof Boolean) {
            array.add((Boolean) value);
        } else if (value == null || value.equals(JsonValue.NULL)) {
            // Fixed: check null before calling equals. toJson(AttributeValue) returns
            // null for binary values, which previously caused a NullPointerException here
            // (the object-builder overload above already had this guard).
            array.addNull();
        }
    }

    /** Converts a single attribute value to a JsonValue/JsonStructure, String, BigDecimal, Boolean, or null. */
    public static Object toJson(AttributeValue attributeValue) {
        // with json-p 1.1 Json.createValue() can be used.
        if (attributeValue == null) {
            return null;
        }
        if (attributeValue.s() != null) {
            return attributeValue.s();
        }
        if (attributeValue.n() != null) {
            // DynamoDB numbers arrive as decimal strings; BigDecimal preserves precision.
            return new BigDecimal(attributeValue.n());
        }
        if (attributeValue.bool() != null) {
            // return attributeValue.bool() ? JsonValue.TRUE : JsonValue.FALSE;
            return attributeValue.bool();
        }
        if (attributeValue.b() != null) {
            // Binary values are intentionally dropped. To keep them, encode as Base64:
            // return Base64.getEncoder().encodeToString(attributeValue.b().asByteArray());
            return null;
        }
        if (attributeValue.nul() != null && attributeValue.nul()) {
            return JsonValue.NULL;
        }
        if (!attributeValue.m().isEmpty()) {
            return toJson(attributeValue.m());
        }
        if (!attributeValue.l().isEmpty()) {
            return toJson(attributeValue.l());
        }
        if (!attributeValue.ss().isEmpty()) {
            // Fixed: build a JsonArray. Returning the raw List<String> meant the
            // add(...) helpers above silently dropped string sets.
            JsonArrayBuilder ssBuilder = Json.createArrayBuilder();
            attributeValue.ss().forEach(ssBuilder::add);
            return ssBuilder.build();
        }
        if (!attributeValue.ns().isEmpty()) {
            // Fixed: number sets were also silently dropped for the same reason.
            JsonArrayBuilder nsBuilder = Json.createArrayBuilder();
            attributeValue.ns().forEach(n -> nsBuilder.add(new BigDecimal(n)));
            return nsBuilder.build();
        }
        if (!attributeValue.bs().isEmpty()) {
            // Binary sets are intentionally dropped, as with single binary values.
            // return attributeValue.bs();
            return null;
        }
        return null;
    }

    /** Converts a JSON object back into a DynamoDB attribute map. */
    public static Map<String, AttributeValue> toAttribute(JsonObject jsonObject) {
        Map<String, AttributeValue> attribute = new HashMap<>();
        jsonObject.entrySet().forEach(e -> {
            attribute.put(e.getKey(), toAttribute(e.getValue()));
        });
        return attribute;
    }

    /** Converts a JSON array back into a list of DynamoDB attribute values. */
    public static List<AttributeValue> toAttribute(JsonArray jsonArray) {
        List<AttributeValue> attributes = new LinkedList<>();
        jsonArray.forEach(e -> {
            attributes.add(toAttribute(e));
        });
        return attributes;
    }

    /** Converts a single JSON value back into a DynamoDB attribute value, or null for null input. */
    public static AttributeValue toAttribute(JsonValue jsonValue) {
        if (jsonValue == null) {
            return null;
        }
        switch (jsonValue.getValueType()) {
            case STRING:
                return AttributeValue.builder().s(((JsonString) jsonValue).getString()).build();
            case OBJECT:
                return AttributeValue.builder().m(toAttribute((JsonObject) jsonValue)).build();
            case ARRAY:
                return AttributeValue.builder().l(toAttribute((JsonArray) jsonValue)).build();
            case NUMBER:
                // JsonNumber.toString() is the exact decimal representation DynamoDB expects.
                return AttributeValue.builder().n(((JsonNumber) jsonValue).toString()).build();
            case TRUE:
                return AttributeValue.builder().bool(true).build();
            case FALSE:
                return AttributeValue.builder().bool(false).build();
            case NULL:
                return AttributeValue.builder().nul(true).build();
        }
        return null;
    }

    /** Serializes and Deflate-compresses an attribute map into a single binary attribute. */
    public static AttributeValue compress(Map<String, AttributeValue> attributeValues) throws IOException {
        return compress(toJson(attributeValues));
    }

    /** Serializes and Deflate-compresses an attribute list into a single binary attribute. */
    public static AttributeValue compress(List<AttributeValue> attributeValues) throws IOException {
        return compress(toJson(attributeValues));
    }

    /** Writes the JSON structure to bytes, Deflate-compresses them, and wraps them in a binary attribute. */
    public static AttributeValue compress(JsonStructure jsonStructure) throws IOException {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        Json.createWriter(outputStream).write(jsonStructure);
        outputStream.close();
        byte[] jsonBinary = outputStream.toByteArray();
        outputStream = new ByteArrayOutputStream();
        Deflater deflater = new Deflater();
        deflater.setInput(jsonBinary);
        deflater.finish();
        byte[] buffer = new byte[1024];
        while (!deflater.finished()) {
            int count = deflater.deflate(buffer); // number of compressed bytes produced this pass
            outputStream.write(buffer, 0, count);
        }
        outputStream.close();
        jsonBinary = outputStream.toByteArray();
        return AttributeValue.builder().b(SdkBytes.fromByteArray(jsonBinary)).build();
    }

    /** Reverses {@link #compress(JsonStructure)}: inflates the binary attribute and parses the JSON. */
    public static JsonStructure decompress(AttributeValue attributeValue) throws IOException, DataFormatException {
        Inflater inflater = new Inflater();
        byte[] jsonBinary = attributeValue.b().asByteArray();
        inflater.setInput(jsonBinary);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream(jsonBinary.length);
        byte[] buffer = new byte[1024];
        while (!inflater.finished()) {
            int count = inflater.inflate(buffer);
            outputStream.write(buffer, 0, count);
        }
        outputStream.close();
        byte[] output = outputStream.toByteArray();
        ByteArrayInputStream bis = new ByteArrayInputStream(output);
        return Json.createReader(bis).read();
    }
}
This class is an updated version of the one originally introduced in this gist.
This post also provides a link to a Jackson AttributeValue serializer if you prefer to use that library for JSON serialization.
Related
I need to create a custom UDF in hive to convert array<map<string, string>> into array<struct<key:string, value:string>>
I am trying with the following class:
import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
public class ArrayOfMapToArrayOfStructUdf extends GenericUDF {
private static final String UDF_NAME = "convertArrayMapToArrayStruct";
#Override
public String getUdfName() {
return UDF_NAME;
}
#Override
public ObjectInspector initialize(ObjectInspector[] objectInspectors) throws UDFArgumentException {
if (objectInspectors.length != 1) {
throw new UDFArgumentLengthException(UDF_NAME + " takes 1 argument of type array<map<key, value>>");
}
if (!(validateArgumentType(objectInspectors))) {
throw new IllegalArgumentException("Code should never reach this section!");
}
return createReturnObjectInspector();
}
private boolean validateArgumentType(ObjectInspector[] objectInspectors) throws UDFArgumentException {
if (!(objectInspectors[0] instanceof ListObjectInspector)) {
throw new UDFArgumentException("the argument must be of type: array<map<key, value>>");
}
ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspectors[0];
if (!(listObjectInspector.getListElementObjectInspector() instanceof MapObjectInspector)) {
throw new UDFArgumentException("the array contents must be of type: map<key, value>");
}
return true;
}
private ObjectInspector createReturnObjectInspector() {
List<String> structFieldNames = Lists.newArrayList("key", "value");
List<ObjectInspector> structFieldObjectInspectors =
Lists.newArrayList(PrimitiveObjectInspectorFactory.javaStringObjectInspector,
PrimitiveObjectInspectorFactory.javaStringObjectInspector);
StructObjectInspector structObjectInspector =
ObjectInspectorFactory.getStandardStructObjectInspector(structFieldNames, structFieldObjectInspectors);
return ObjectInspectorFactory.getStandardListObjectInspector(structObjectInspector);
}
#Override
public Object evaluate(DeferredObject[] deferredObjects) throws HiveException {
if (deferredObjects == null || deferredObjects.length < 1) {
return null;
}
List<Map<String, String>> arrayOfMap = (List<Map<String, String>>) deferredObjects[0].get();
if (arrayOfMap == null) {
return null;
}
List<Object> arrayOfStruct = Lists.newArrayList();
for (Map<String, String> map : arrayOfMap) {
Object[] object = new Object[2];
object[0] = new Text(map.get("key"));
object[1] = new Text(map.get("value"));
arrayOfStruct.add(object);
}
return arrayOfStruct;
}
#Override
public String getDisplayString(String[] strings) {
return UDF_NAME;
}
}
And I'm getting the following error:
Failed with exception java.io.IOException:org.apache.hadoop.hive.ql.metadata.HiveException: Error evaluating convertArrayMapToArrayStruct
I don't know how to build the object to return in the evaluate method.
The column I'm trying to transform has data as follows:
[{"key": "key1", "value": "value1"}, {"key": "key2", "value": "value2"}, ..., {"key": "keyN", "value": "valueN"}]
Thanks!
This worked:
#Override
public Object evaluate(DeferredObject[] deferredObjects) throws HiveException {
if (deferredObjects == null || deferredObjects.length < 1) {
return null;
}
LazyArray lazyArray = (LazyArray) deferredObjects[0].get();
if (lazyArray == null) {
return null;
}
List<Object> lazyList = lazyArray.getList();
List<Object> finalList = Lists.newArrayList();
for (Object o : lazyList) {
LazyMap lazyMap = (LazyMap) o;
String key = "";
String value = "";
for (Map.Entry<?, ?> entry : lazyMap.getMap().entrySet()) {
if (entry.getKey().toString().equals("key")) {
key = entry.getValue().toString();
} else if (entry.getKey().toString().equals("value")) {
value = entry.getValue().toString();
}
}
finalList.add(Lists.newArrayList(key, value));
}
return finalList;
}
I am trying to write some Parquet records that contain LogicalTypes to JSON. I do this via AvroParquetReader, which gives me an Avro GenericRecord:
GenericData.get().addLogicalTypeConversion(new TimeConversions.TimeMillisConversion());
try (ParquetReader<GenericRecord> parquetReader =
AvroParquetReader.<GenericRecord>builder(new LocalInputFile(this.path))
.withDataModel(GenericData.get())
.build()) {
GenericRecord record = parquetReader.read();
record.toString();
}
record.toString() produces:
{"universe_member_id": 94639, "member_from_dt": 2001-08-31T00:00:00Z, "member_to_dt": 2200-01-01T00:00:00Z}
Notice that this is invalid JSON - the dates are correctly converted as per their LogicalType, but are not surrounded by quotes.
So instead I tried the JsonEncoder:
GenericData.get().addLogicalTypeConversion(new TimeConversions.TimeMillisConversion()); //etc
OutputStream stringOutputStream = new StringOutputStream();
try (ParquetReader<GenericRecord> parquetReader =
AvroParquetReader.<GenericRecord>builder(new LocalInputFile(this.path))
.withDataModel(GenericData.get())
.build()) {
GenericRecord record = parquetReader.read();
DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema());
JsonEncoder encoder = EncoderFactory.get().jsonEncoder(record.getSchema(), stringOutputStream);
writer.write(record, encoder);
encoder.flush();
}
but this doesn't convert the date fields at all and bakes the datatype into every record:
{"universe_member_id":{"long":94639},"member_from_dt":{"long":999216000000000},"member_to_dt":{"long":7258118400000000}}
The output I'm looking for is:
{"universe_member_id": 94639, "member_from_dt": "2001-08-31T00:00:00Z", "member_to_dt": "2200-01-01T00:00:00Z"}
How can I correctly write a GenericRecord to JSON?
As you have indicated, the method toString() in class GenericRecord will give you a nearly valid JSON representation.
As you can see in the source code of the GenericData class, the GenericData.Record toString method just invoke the GenericData toString(Object) method in its implementation.
If you want a valid JSON representation of the record, you can take that code and, with minimal modifications, obtain the information that you need.
For instance, we can define a utility class like the following:
package stackoverflow.parquetavro;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.function.Function;
import org.apache.avro.LogicalType;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericContainer;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericEnumSymbol;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
public class GenericRecordJsonEncoder {
Map<LogicalType, Function<Object, Object>> logicalTypesConverters = new HashMap<>();
public void registerLogicalTypeConverter(LogicalType logicalType, Function<Object, Object> converter) {
this.logicalTypesConverters.put(logicalType, converter);
}
public Function<Object, Object> getLogicalTypeConverter(Schema.Field field) {
Schema fieldSchema = field.schema();
LogicalType logicalType = fieldSchema.getLogicalType();
return getLogicalTypeConverter(logicalType);
}
public Function<Object, Object> getLogicalTypeConverter(LogicalType logicalType) {
if (logicalType == null) {
return Function.identity();
}
return logicalTypesConverters.getOrDefault(logicalType, Function.identity());
}
public String serialize(GenericRecord value) {
StringBuilder buffer = new StringBuilder();
serialize(value, buffer, new IdentityHashMap<>(128) );
String result = buffer.toString();
return result;
}
private static final String TOSTRING_CIRCULAR_REFERENCE_ERROR_TEXT =
" \">>> CIRCULAR REFERENCE CANNOT BE PUT IN JSON STRING, ABORTING RECURSION <<<\" ";
/** Renders a Java datum as JSON. */
private void serialize(final Object datum, final StringBuilder buffer, final IdentityHashMap<Object, Object> seenObjects) {
if (isRecord(datum)) {
if (seenObjects.containsKey(datum)) {
buffer.append(TOSTRING_CIRCULAR_REFERENCE_ERROR_TEXT);
return;
}
seenObjects.put(datum, datum);
buffer.append("{");
int count = 0;
Schema schema = getRecordSchema(datum);
for (Schema.Field f : schema.getFields()) {
serialize(f.name(), buffer, seenObjects);
buffer.append(": ");
Function<Object, Object> logicalTypeConverter = getLogicalTypeConverter(f);
serialize(logicalTypeConverter.apply(getField(datum, f.name(), f.pos())), buffer, seenObjects);
if (++count < schema.getFields().size())
buffer.append(", ");
}
buffer.append("}");
seenObjects.remove(datum);
} else if (isArray(datum)) {
if (seenObjects.containsKey(datum)) {
buffer.append(TOSTRING_CIRCULAR_REFERENCE_ERROR_TEXT);
return;
}
seenObjects.put(datum, datum);
Collection<?> array = getArrayAsCollection(datum);
buffer.append("[");
long last = array.size()-1;
int i = 0;
for (Object element : array) {
serialize(element, buffer, seenObjects);
if (i++ < last)
buffer.append(", ");
}
buffer.append("]");
seenObjects.remove(datum);
} else if (isMap(datum)) {
if (seenObjects.containsKey(datum)) {
buffer.append(TOSTRING_CIRCULAR_REFERENCE_ERROR_TEXT);
return;
}
seenObjects.put(datum, datum);
buffer.append("{");
int count = 0;
#SuppressWarnings(value="unchecked")
Map<Object,Object> map = (Map<Object,Object>)datum;
for (Map.Entry<Object,Object> entry : map.entrySet()) {
serialize(entry.getKey(), buffer, seenObjects);
buffer.append(": ");
serialize(entry.getValue(), buffer, seenObjects);
if (++count < map.size())
buffer.append(", ");
}
buffer.append("}");
seenObjects.remove(datum);
} else if (isString(datum)|| isEnum(datum)) {
buffer.append("\"");
writeEscapedString(datum.toString(), buffer);
buffer.append("\"");
} else if (isBytes(datum)) {
buffer.append("{\"bytes\": \"");
ByteBuffer bytes = ((ByteBuffer) datum).duplicate();
writeEscapedString(StandardCharsets.ISO_8859_1.decode(bytes), buffer);
buffer.append("\"}");
} else if (((datum instanceof Float) && // quote Nan & Infinity
(((Float)datum).isInfinite() || ((Float)datum).isNaN()))
|| ((datum instanceof Double) &&
(((Double)datum).isInfinite() || ((Double)datum).isNaN()))) {
buffer.append("\"");
buffer.append(datum);
buffer.append("\"");
} else if (datum instanceof GenericData) {
if (seenObjects.containsKey(datum)) {
buffer.append(TOSTRING_CIRCULAR_REFERENCE_ERROR_TEXT);
return;
}
seenObjects.put(datum, datum);
serialize(datum, buffer, seenObjects);
seenObjects.remove(datum);
} else {
// This fallback is the reason why GenericRecord toString does not
// generate a valid JSON representation
buffer.append(datum);
}
}
// All these methods are also copied from the GenericData class source
private boolean isRecord(Object datum) {
return datum instanceof IndexedRecord;
}
private Schema getRecordSchema(Object record) {
return ((GenericContainer)record).getSchema();
}
private Object getField(Object record, String name, int position) {
return ((IndexedRecord)record).get(position);
}
private boolean isArray(Object datum) {
return datum instanceof Collection;
}
private Collection getArrayAsCollection(Object datum) {
return (Collection)datum;
}
private boolean isEnum(Object datum) {
return datum instanceof GenericEnumSymbol;
}
private boolean isMap(Object datum) {
return datum instanceof Map;
}
private boolean isString(Object datum) {
return datum instanceof CharSequence;
}
private boolean isBytes(Object datum) {
return datum instanceof ByteBuffer;
}
private void writeEscapedString(CharSequence string, StringBuilder builder) {
for(int i = 0; i < string.length(); i++){
char ch = string.charAt(i);
switch(ch){
case '"':
builder.append("\\\"");
break;
case '\\':
builder.append("\\\\");
break;
case '\b':
builder.append("\\b");
break;
case '\f':
builder.append("\\f");
break;
case '\n':
builder.append("\\n");
break;
case '\r':
builder.append("\\r");
break;
case '\t':
builder.append("\\t");
break;
default:
// Reference: http://www.unicode.org/versions/Unicode5.1.0/
if((ch>='\u0000' && ch<='\u001F') || (ch>='\u007F' && ch<='\u009F') || (ch>='\u2000' && ch<='\u20FF')){
String hex = Integer.toHexString(ch);
builder.append("\\u");
for(int j = 0; j < 4 - hex.length(); j++)
builder.append('0');
builder.append(hex.toUpperCase());
} else {
builder.append(ch);
}
}
}
}
}
In this class you can register converters for the logical types that you need. Consider the following example:
GenericRecordJsonEncoder encoder = new GenericRecordJsonEncoder();
// Register as many logical types converters as you need
encoder.registerLogicalTypeConverter(LogicalTypes.timestampMillis(), o -> {
final Instant instant = (Instant)o;
final String result = DateTimeFormatter.ISO_INSTANT.format(instant);
return result;
});
String json = encoder.serialize(genericRecord);
System.out.println(json);
This will provide you the desired result.
#Override
public Long createPost(Request request) {
Base.open();
ObjectMapper mapper = new ObjectMapper();
try {
Post newPost = mapper.readValue(request.body(), Post.class);
// Map values = ... initialize map
// newPost.saveIt();
} catch (IOException ex) {
Logger.getLogger(PostServiceImpl.class.getName()).log(Level.SEVERE, null, ex);
}
Base.close();
return 1L;
}
From the official docs, this Map values = ... initialize map is not clear. I can do newPost.set("first_name", "Sam") but is there a better way instead of setting values likes this?
I'm not familiar with Spark (I'm the author of ActiveWeb ), but you can use filters to open/close connections instead of polluting your service classes:
http://sparkjava.com/documentation.html#filters
Additionally, if you can convert your request parameters into a java.util.Map, you then do this:
// Hydrate a new model directly from a Map of request parameters,
// then save and report validation errors if the save fails.
Post post = new Post();
post.fromMap(parameters);
if(post.save()){
//display success
}else{
Errors errors = post.errors();
//display errors
}
This is an example from ActiveWeb, but will help you with Spark too:
https://github.com/javalite/activeweb-simple/blob/master/src/main/java/app/controllers/BooksController.java
If I'm understanding correctly, you are looking at how to take values from a POST request and then use ActiveJDBC to save those values. I'm quite new as well and we are in the beginning stages of our app, but we are using SparkJava with ActiveJDBC.
The example is actual code, I didn't have time to simplify it. But basically we created a POJO for the model class. We originally extended the org.javalite.activejdbc.Model but we needed to handle audit fields (create, update user/time) and help translate from JSON, so we extended this with a custom class called CecilModel. But CecilModel extends the Model class.
We have a controller that receives the request. The request comes in as JSON that matches the field names of our model class. In our custom CecilModel class we map the JSON to a Map, which we then pass to the Model.fromMap method to hydrate the fields and put the data into our custom model POJO. We don't need the getters or setters; they're more for convenience. We just need our JSON request to have the same names as in our model.
Below is our code but maybe you can peek through it to see how we are doing it.
Our table model pojo.
package com.brookdale.model;
import java.sql.Timestamp;
import java.util.Objects;

import org.javalite.activejdbc.Model;
import org.javalite.activejdbc.annotations.BelongsTo;
import org.javalite.activejdbc.annotations.BelongsToParents;
import org.javalite.activejdbc.annotations.IdGenerator;
import org.javalite.activejdbc.annotations.IdName;
import org.javalite.activejdbc.annotations.Table;

import com.brookdale.model.activejdbc.CecilModel;
// This class handles mapping of data from the database to objects
// and back, including custom selection queries.
#Table("RECURRINGITEMSCHEDULE")
#BelongsToParents({
#BelongsTo(foreignKeyName="itemID",parent=Item.class),
#BelongsTo(foreignKeyName="contactAgreementID",parent=ContactAgreement.class),
#BelongsTo(foreignKeyName="unitOfMeasureCode",parent=UnitOfMeasure.class)
})
#IdGenerator("SQ_RECURRINGITEMSCHEDULE.nextVal")
#IdName("recurringItemScheduleID")
public class RecurringItem extends CecilModel {
public Long getRecurringItemScheduleID() {
return getLong("recurringItemScheduleID");
}
public void setRecurringItemScheduleID(Long recurringItemScheduleID) {
set("recurringItemScheduleID",recurringItemScheduleID);
}
public Long getContactAgreementID() {
return getLong("contactAgreementID");
}
public void setContactAgreementID(Long contactAgreementID) {
set("contactAgreementID",contactAgreementID);
}
public Long getItemID() {
return getLong("itemID");
}
public void setItemID(Long itemID) {
set("itemID",itemID);
}
public Double getUnitChargeAmt() {
return getDouble("unitChargeAmt");
}
public void setUnitChargeAmt(Double unitChargeAmt) {
set("unitChargeAmt",unitChargeAmt);
}
public Integer getUnitQty() {
return getInteger("unitQty");
}
public void setUnitQty(Integer unitQty) {
set("unitQty",unitQty);
}
public String getUnitOfMeasureCode() {
return getString("unitOfMeasureCode");
}
public void setUnitOfMeasureCode(String unitOfMeasureCode) {
set("unitOfMeasureCode",unitOfMeasureCode);
}
public Timestamp getLastGeneratedPeriodEndDate() {
return getTimestamp("lastGeneratedPeriodEndDate");
}
public void setLastGeneratedPeriodEndDate(Timestamp lastGeneratedPeriodEndDate) {
set("lastGeneratedPeriodEndDate",lastGeneratedPeriodEndDate);
}
public Timestamp getEffEndDate() {
return getTimestamp("effEndDate");
}
public void setEffEndDate(Timestamp effEndDate) {
set("effEndDate",effEndDate);
}
public Timestamp getEffStartDate() {
return getTimestamp("effStartDate");
}
public void setEffStartDate(Timestamp effStartDate) {
set("effStartDate",effStartDate);
}
#Override
public void validate() {
validatePresenceOf("unitofmeasurecode","itemid","unitqty","effstartdate","unitChargeAmt","contactAgreementID");
validateNumericalityOf("itemid","unitQty","contactAgreementID");
// check to make sure this is an update operation
if (!this.isNew()) {
RecurringItem ridb = RecurringItem.findById(this.getId());
if (ridb.getLastGeneratedPeriodEndDate() != null) {
if (this.getItemID() != ridb.getItemID())
this.addError("itemid", "Item can not be updated once a charge has been created.");
if (!this.getEffStartDate().equals(ridb.getEffStartDate()))
this.addError("effstartdate", "Effective start date can not be updated once a charge has been created.");
if (this.getUnitChargeAmt() != ridb.getUnitChargeAmt())
this.addError("unitchargeamt", "Unit charge amount can not be updated after last generated period end date has been set.");
if (this.getUnitQty() != ridb.getUnitQty())
this.addError("unitqty", "Unit quantity can not be updated after last generated period end date has been set.");
if (!this.getUnitOfMeasureCode().equals(ridb.getUnitOfMeasureCode()))
this.addError("", "Unit of measure can not be updated after last generated period end date has been set.");
}
}
if (this.getEffEndDate() != null && this.getEffStartDate().compareTo(this.getEffEndDate()) >= 0) {
this.addError("effenddate", "Effective end date can not come before the start date.");
}
}
}
Extends our custom Model class. This will extend the actual ActiveJDBC Model class.
package com.brookdale.model.activejdbc;
import java.io.IOException;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import org.javalite.activejdbc.Model;
import org.javalite.activejdbc.validation.ValidationBuilder;
import org.javalite.activejdbc.validation.ValidatorAdapter;
import com.brookdale.core.CLArgs;
import com.brookdale.security.bo.User;
public abstract class CecilModel extends Model {
private static final transient TypeReference<HashMap<String, Object>> mapType = new TypeReference<HashMap<String, Object>>() {};
private static final transient TypeReference<LinkedList<HashMap<String, Object>>> listMapType = new TypeReference<LinkedList<HashMap<String, Object>>>() {};
private static final transient SimpleDateFormat jsonDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
// Typed accessors for the shared audit columns every Cecil table carries:
// create/update user and timestamp, plus an optimistic-locking counter.
public Timestamp getUpdateDateTime() {
return getTimestamp("updateDateTime");
}
public void setUpdateDateTime(LocalDateTime updateDateTime) {
set("updateDateTime",updateDateTime == null ? null : Timestamp.valueOf(updateDateTime));
}
public Timestamp getCreateDateTime() {
return getTimestamp("createDateTime");
}
public void setCreateDateTime(LocalDateTime createDateTime) {
set("createDateTime",createDateTime == null ? null : Timestamp.valueOf(createDateTime));
}
public String getUpdateUsername() {
return getString("updateUsername");
}
public void setUpdateUsername(String updateUsername) {
set("updateUsername",updateUsername);
}
public String getCreateUsername() {
return getString("createUsername");
}
public void setCreateUsername(String createUsername) {
set("createUsername",createUsername);
}
// Revision counter bumped on every save; used for optimistic locking.
public Long getUpdateTimeId() {
return getLong("updateTimeId");
}
public void setUpdateTimeId(Long updateTimeId) {
set("updateTimeId",updateTimeId);
}
/**
 * Persists this model via {@link Model#save()}, stamping the audit columns
 * first. A null id or an id of "0" is treated as a brand-new row.
 *
 * @param user the user recorded in the audit columns (ignored when auth is disabled)
 * @return true if the underlying save succeeded
 */
public boolean save(User user) {
    final String userId = CLArgs.args.isAuthenabled() ? user.getUserid() : "TEST_MODE";
    final java.sql.Timestamp now = java.sql.Timestamp.valueOf(java.time.LocalDateTime.now());
    final Object id = this.getId();
    if (id == null || "0".equals(id.toString())) {
        // New row: clear the placeholder id and stamp the creation columns.
        this.setId(null);
        this.set("createDateTime", now);
        this.set("createUsername", userId);
        this.set("updateDateTime", now);
        this.set("updateUsername", userId);
        this.set("updateTimeId", 1);
    } else {
        // Existing row: refresh the update stamp and bump the revision counter.
        final Long revision = this.getLong("updateTimeid");
        this.set("updateDateTime", now);
        this.set("updateUsername", userId);
        this.set("updateTimeId", revision == null ? 1 : revision + 1);
    }
    return super.save();
}
public boolean saveIt(User user) {
String userId = (CLArgs.args.isAuthenabled()) ? user.getUserid() : "TEST_MODE";
// insert
java.sql.Timestamp now = java.sql.Timestamp.valueOf(java.time.LocalDateTime.now());
if (this.isNew()) {
this.setId(null);
this.set("createDateTime", now);
this.set("createUsername", userId);
this.set("updateDateTime", now);
this.set("updateUsername", userId);
this.set("updateTimeId", 1);
}
// update
else {
Long updatetimeid = this.getLong("updateTimeid");
this.set("updateDateTime", now);
this.set("updateUsername", userId);
this.set("updateTimeId", updatetimeid == null ? 1 : updatetimeid + 1);
}
return super.saveIt();
}
public boolean saveModel(User user, boolean insert) {
if(insert){
this.insertIt(user);
}else{
this.updateIt(user);
}
return super.saveIt();
}
public boolean insertIt(User user) {
// insert
java.sql.Timestamp now = java.sql.Timestamp.valueOf(java.time.LocalDateTime.now());
this.setId(null);
this.set("createDateTime", now);
this.set("createUsername", user.getUserid());
this.set("updateDateTime", now);
this.set("updateUsername", user.getUserid());
this.set("updateTimeId", 1);
return super.saveIt();
}
public boolean updateIt(User user) {
// insert
java.sql.Timestamp now = java.sql.Timestamp.valueOf(java.time.LocalDateTime.now());
Long updatetimeid = this.getLong("updateTimeid");
this.set("updateDateTime", now);
this.set("updateUsername", user.getUserid());
this.set("updateTimeId", updatetimeid == null ? 1 : updatetimeid + 1);
return super.saveIt();
}
// Convert a single ActiveJdbc Object to a json string
#SuppressWarnings("unchecked")
public String toJson() {
Map<String, Object> objMap = this.toMap();
try {
if (objMap.containsKey("parents")) {
Map<String, ?> parentsMap = (Map<String, ?>) objMap.get("parents");
for (String key: parentsMap.keySet()) {
objMap.put(key, parentsMap.get(key));
}
objMap.remove("parents");
}
if (objMap.containsKey("children")) {
Map<String, ?> childrenMap = (Map<String, ?>) objMap.get("children");
for (String key: childrenMap.keySet()) {
objMap.put(key, childrenMap.get(key));
}
objMap.remove("children");
}
ObjectMapper mapper = new ObjectMapper();
mapper.setDateFormat(jsonDateFormat);
return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(objMap);
} catch (Exception e) { throw new RuntimeException(e); }
}
// Converts a single json object into an ActiveJdbc Model
public <T extends CecilModel> T toObj(String json) {
try {
Map<String, Object> objMap = toMap(json);
convertDatesToTimestamp(objMap);
return this.fromMap(objMap);
} catch (Exception e) { throw new RuntimeException(e); }
}
// STATIC CONVERTERS FOR COLLECTIONS OF OBJECTS
// Convert an ActiveJdbc LazyList Collection to a JSON string
public static <T extends Model> String toJson(Collection<T> objCollection) {
//objCollection.load();
StringBuffer json = new StringBuffer("[ ");
for (T obj: objCollection) {
if (CecilModel.class.isInstance(obj)) {
json.append(((CecilModel)obj).toJson() + ",");
} else {
try {
json.append(new ObjectMapper().writeValueAsString(obj));
} catch (Exception e) {
e.printStackTrace();
}
}
}
return json.substring(0, json.length()-1) + "]";
}
// Converts an array of json objects into a list of ActiveJdbc Models
public static <T extends Model> List<T> toObjList(String json, Class<T> cls) {
List<T> results = new LinkedList<T>();
try {
List<Map<String, Object>> objMapList = toMaps(json);
for (Map<String, Object> objMap: objMapList) {
convertDatesToTimestamp(objMap);
T obj = cls.newInstance().fromMap(objMap);
results.add(obj);
}
} catch (Exception e) { throw new RuntimeException(e); }
return results;
}
// Converts a single json object into a map of key:value pairs
private static Map<String, Object> toMap(String json) {
ObjectMapper mapper = new ObjectMapper();
try {
return mapper.readValue(json, mapType);
} catch (IOException e) { throw new RuntimeException(e); }
}
// Converts an array of json objects into a list of maps
private static List<Map<String, Object>> toMaps(String json) {
ObjectMapper mapper = new ObjectMapper();
try {
return mapper.readValue(json, listMapType);
} catch (IOException e) { throw new RuntimeException(e); }
}
// checks for Strings that are formatted in 'yyyy-MM-ddTHH:mm:ss.SSSZ' format
// if found it will replace the String value with a java.sql.Timestamp value in the objMap
private static final Map<String,Object> convertDatesToTimestamp(Map<String, Object> objMap) {
// attempt to convert all dates to java.sql.Timestamp
for (String key: objMap.keySet()) {
Object value = objMap.get(key);
System.out.println("Checking if '" + key + "=" + (value == null ? "null" : value.toString()) +"' is a date...");
if (value instanceof String && ((String) value).matches("^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d{3}Z$")) {
String valuestr = (String) value;
System.out.println("DATE FOUND FOR '" + key + "' " + value);
objMap.put(key, Timestamp.valueOf(ZonedDateTime.parse(valuestr).toLocalDateTime()));
}
}
return objMap;
}
public static ValidationBuilder<?> validateAbsenceOf(String ... attributes) {
return validateWith(new ValidatorAdapter() {
#Override
public void validate(Model m) {
boolean containsAttribute = false;
for(String attribute:attributes) {
if(m.attributeNames().contains(attribute)) {
//model contains attribute, invalidate now !
m.addValidator(this, attribute);
break;
}
}
}
});
}
}
Our Controller
package com.brookdale.controller;
import static spark.Spark.get;
import static spark.Spark.post;
import static spark.Spark.put;
import org.codehaus.jackson.map.ObjectMapper;
import com.brookdale.model.RecurringItem;
import com.brookdale.model.activejdbc.CecilModel;
import com.brookdale.security.bo.User;
import com.brookdale.service.RecurringItemService;
/**
 * Spark routes for listing and saving recurring items.
 */
public class RecurringItemController {

    public RecurringItemController(final RecurringItemService service) {

        // GET: recurring items for an agreement; presence of "?all" includes ended items.
        get("/api/recurring/list/:agreementid", (request, response) -> {
            final boolean includeAll = request.queryParams("all") != null;
            final Long agreementId = Long.valueOf(request.params(":agreementid"));
            return CecilModel.toJson(service.findByAgreementId(agreementId, includeAll));
        });

        // POST: insert a new recurring item.
        post("/api/recurring/save", (request, response) -> {
            final RecurringItem item = new RecurringItem().toObj(request.body());
            // unitqty is a non-nullable column that is otherwise unused; default it to 1
            if (item.getUnitQty() == null) {
                item.setUnitQty(1);
            }
            System.out.println("ri to insert: " + new ObjectMapper().writeValueAsString(item));
            return service.saveRecurringItem(item, (User) request.attribute("user")).toJson();
        });

        // PUT: update an existing recurring item.
        put("/api/recurring/save", (request, response) -> {
            final RecurringItem item = new RecurringItem().toObj(request.body());
            System.out.println("ri to update: " + new ObjectMapper().writeValueAsString(item));
            return service.saveRecurringItem(item, (User) request.attribute("user")).toJson();
        });
    }
}
Which calls our service layer to do the save.
package com.brookdale.service;
import org.javalite.activejdbc.LazyList;
import org.javalite.activejdbc.Model;
import com.brookdale.model.RecurringItem;
import com.brookdale.security.bo.User;
/**
 * Service layer for querying and persisting recurring items.
 */
public class RecurringItemService {

    public RecurringItemService() { }

    /**
     * Finds recurring items for an agreement, newest effective-start first.
     *
     * @param includeAll when false, restricts results to items without an end date
     */
    public LazyList<Model> findByAgreementId(Long agreementId, boolean includeAll) {
        final String criteria = includeAll
                ? "contactAgreementID = ?"
                : "contactAgreementID = ? and effenddate is null";
        return RecurringItem.find(criteria, agreementId).orderBy("effstartdate desc");
    }

    /** Persists the item with audit stamping and returns it. */
    public RecurringItem saveRecurringItem(RecurringItem ri, User user) {
        ri.saveIt(user);
        return ri;
    }
}
I'm trying to get the JSON values from Distance24 JSON output via Google GSON.
But I can't figure out what and where the Exception comes from (I'm using Google AppEngine with Java).
Here's the class from which I send the request and receive the response.
package de.tum.in.eist.distance;
import java.io.IOException;
import javax.inject.Inject;
import java.net.URL;
import com.google.appengine.api.urlfetch.HTTPResponse;
import com.google.appengine.api.urlfetch.URLFetchService;
import com.google.gson.JsonObject;
import de.tum.in.eist.JsonHelper;
import de.tum.in.eist.URLFetchServiceHelper;
public class Distance24Client {
private final URLFetchService service;
#Inject
public Distance24Client(URLFetchService service) {
this.service = service;
}
public Distance24 getDistanceAPI(String source, String destination) throws IOException {
URL url = new URL("http://www.distance24.org/route.json?stops=" + source + "|" + destination);
HTTPResponse response = service.fetch(url);
String jsonString = URLFetchServiceHelper.toString(response);
try {
JsonObject json = JsonHelper.parse(jsonString);
return toDistance24(json);
} catch (Exception e) {
throw new IOException("Error ocurred in getDistanceAPI(): " + e.getMessage());
}
}
private Distance24 toDistance24(JsonObject response) {
if (!(response.get("stops").getAsJsonObject().getAsJsonArray().size() != 0)) {
throw new IllegalArgumentException("No Status set from Distance24 API");
} else {
JsonObject distances = response.get("distances").getAsJsonObject();
return new Distance24(distances);
}
}
}
And here's the Distance24 Object:
package de.tum.in.eist.distance;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
/**
 * Value object for a distance24.org route: leg distances, their total, and
 * the source/destination coordinates.
 */
public class Distance24 {

    private int[] distances;
    private int totalDistance;
    private Double sourceLat;
    private Double sourceLon;
    private Double destLat;
    private Double destLong;

    /**
     * Builds a Distance24 from the full route response object.
     *
     * FIX: "stops" and "distances" are JSON arrays in the payload
     * ({"stops":[...], "distances":[5581]}); the previous code called
     * getAsJsonObject() on the array (which throws) and getAsJsonArray()
     * on the root object with no member name.
     *
     * @param response the whole route.json object, containing "stops" and "distances"
     */
    public Distance24(JsonObject response) {
        this.setDistances(getIntArray(response));
        this.setTotalDistance(getSum(this.distances));
        JsonArray stops = response.getAsJsonArray("stops");
        JsonObject sourceStop = stops.get(0).getAsJsonObject();
        JsonObject destStop = stops.get(1).getAsJsonObject();
        this.setSourceLat(sourceStop.get("latitude").getAsDouble());
        this.setSourceLon(sourceStop.get("longitude").getAsDouble());
        this.setDestLat(destStop.get("latitude").getAsDouble());
        this.setDestLong(destStop.get("longitude").getAsDouble());
    }

    /** Extracts the "distances" array from the response as an int[]. */
    private int[] getIntArray(JsonObject response) {
        JsonArray distances = response.getAsJsonArray("distances");
        int[] result = new int[distances.size()];
        for (int i = 0; i < distances.size(); i++) {
            result[i] = distances.get(i).getAsInt();
        }
        return result;
    }

    /** Sums the leg distances. */
    private int getSum(int[] array) {
        int sum = 0;
        for (int element : array) {
            sum += element;
        }
        return sum;
    }

    private void setDistances(int[] distances) {
        this.distances = distances;
    }

    public int getTotalDistance() {
        return totalDistance;
    }

    public void setTotalDistance(int totalDistance) {
        this.totalDistance = totalDistance;
    }

    public Double getSourceLat() {
        return sourceLat;
    }

    public void setSourceLat(Double sourceLat) {
        this.sourceLat = sourceLat;
    }

    public Double getSourceLon() {
        return sourceLon;
    }

    public void setSourceLon(Double sourceLon) {
        this.sourceLon = sourceLon;
    }

    public Double getDestLat() {
        return destLat;
    }

    public void setDestLat(Double destLat) {
        this.destLat = destLat;
    }

    public Double getDestLong() {
        return destLong;
    }

    public void setDestLong(Double destLong) {
        this.destLong = destLong;
    }
}
As a result, I get the whole JSON Object as a String output for e.getMessage(). So I guess the information retrieving works, even though it's on the wrong part of the code.
Also, in the same try-catch block of the code (Distance24Client, method "toDistance24"), it says the error occurred in line 30, which is the return statement of the "toDistance24" method.
(clickable)
Running http://www.distance24.org/route.json?stops=detroit|dublin from my browser gives me
{"stops":[{"region":"Michigan ...
"distances":[5581]}
So distances is an array and not an object.
So your line:
JsonObject distances = response.get("distances").getAsJsonObject();
is wrong. Read distances as a JsonArray.
Create a method to handle array or no-array
/**
 * Parses a JSON string and returns the root element, accepting either a
 * JSON object or a JSON array.
 *
 * @throws IllegalArgumentException when the root is neither object nor array
 *         (e.g. a bare primitive or null)
 */
public static JsonElement toJsonElement(String jsonString) {
    JsonElement jsonElement = new JsonParser().parse(jsonString);
    // FIX: error-message typo ("stirng"); the getAsJsonObject/getAsJsonArray
    // round-trip returned the same element and was removed.
    if (!(jsonElement instanceof JsonObject) && !(jsonElement instanceof JsonArray)) {
        throw new IllegalArgumentException(jsonString + " is not a valid JSON string");
    }
    return jsonElement;
}
I'm looking to implement a custom Hadoop Writable class where one of the fields is a timestamp. I can't seem to find a class in the Hadoop libraries (e.g. a Writable for Date or Calendar) which would make this easy. I'm thinking of creating a custom Writable using get/setTimeInMillis on Calendar, but I'm wondering if there is a better/built-in solution to this problem.
There is no Writable for a Calendar/Date in Hadoop. Considering that you can get the timeInMillis as a long from the Calendar object, you can use the LongWritable to serialize a calendar object if and only if your application always uses the default UTC time zone (i.e. it's "agnostic" to time zones; it always assumes that timeInMillis represents a UTC time).
If you use another time zone, or if your application needs to be able to interpret a timeInMillis with respect to various time zones, you'll have to write a custom Writable implementation from scratch.
Here's a custom writable that I generated for you to illustrate a writable with three properties, one of which is a date. You can see that the data value is persisted as a long and that it's easy to convert a long to and from a Date. If having three properties is too much, I can just generate a writable with a date for you.
package com.lmx.writable;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import com.eaio.uuid.UUID;
import org.apache.hadoop.io.*;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultDataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * Hadoop Writable with three properties — a Date (persisted as a long via
 * Date.getTime()), an int count, and a String name — plus per-property
 * byte[] accessors, Pig tuple/schema support, and JSON conversion.
 *
 * Nullable fields (date, name) are serialized with a leading boolean
 * presence flag. Not thread-safe.
 */
public class MyCustomWritable implements Writable {

    // Property indices into changeFlag. FIX: made final — they are constants.
    public static final int PROPERTY_DATE = 0;
    public static final int PROPERTY_COUNT = 1;
    public static final int PROPERTY_NAME = 2;

    // Tracks which properties have been set since the last resetChangeFlags().
    private boolean[] changeFlag = new boolean[3];

    private Date _date;
    private int _count;
    private String _name;

    public MyCustomWritable() {
        resetChangeFlags();
    }

    public MyCustomWritable(Date _date, int _count, String _name) {
        resetChangeFlags();
        setDate(_date);
        setCount(_count);
        setName(_name);
    }

    /**
     * Deserializes from the byte layout produced by {@link #write(DataOutput)}.
     *
     * FIX: the IOException was silently swallowed, leaving a half-initialized
     * object on corrupt input; it is now rethrown with the cause attached.
     */
    public MyCustomWritable(byte[] bytes) {
        ByteArrayInputStream is = new ByteArrayInputStream(bytes);
        DataInput in = new DataInputStream(is);
        try {
            readFields(in);
        } catch (IOException e) {
            throw new IllegalArgumentException("Unable to deserialize MyCustomWritable from bytes", e);
        }
        resetChangeFlags();
    }

    public Date getDate() {
        return _date;
    }

    public void setDate(Date value) {
        _date = value;
        changeFlag[PROPERTY_DATE] = true;
    }

    public int getCount() {
        return _count;
    }

    public void setCount(int value) {
        _count = value;
        changeFlag[PROPERTY_COUNT] = true;
    }

    public String getName() {
        return _name;
    }

    public void setName(String value) {
        _name = value;
        changeFlag[PROPERTY_NAME] = true;
    }

    /** Reads all three properties; must mirror {@link #write(DataOutput)}. */
    public void readFields(DataInput in) throws IOException {
        // Date _date: presence flag, then epoch millis
        if (in.readBoolean()) {
            _date = new Date(in.readLong());
            changeFlag[PROPERTY_DATE] = true;
        } else {
            _date = null;
            changeFlag[PROPERTY_DATE] = false;
        }
        // int _count: always present
        _count = in.readInt();
        changeFlag[PROPERTY_COUNT] = true;
        // String _name: presence flag, then Text-encoded string
        if (in.readBoolean()) {
            _name = Text.readString(in);
            changeFlag[PROPERTY_NAME] = true;
        } else {
            _name = null;
            changeFlag[PROPERTY_NAME] = false;
        }
    }

    /** Writes all three properties; must mirror {@link #readFields(DataInput)}. */
    public void write(DataOutput out) throws IOException {
        // Date _date
        if (_date == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeLong(_date.getTime());
        }
        // int _count
        out.writeInt(_count);
        // String _name
        if (_name == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            Text.writeString(out, _name);
        }
    }

    /** Serializes the full record to a byte array. */
    public byte[] getBytes() throws IOException {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(os);
        write(out);
        out.flush();
        out.close();
        return os.toByteArray();
    }

    /** Clears all change-tracking flags. */
    public void resetChangeFlags() {
        changeFlag[PROPERTY_DATE] = false;
        changeFlag[PROPERTY_COUNT] = false;
        changeFlag[PROPERTY_NAME] = false;
    }

    /** @param i one of the PROPERTY_* indices */
    public boolean getChangeFlag(int i) {
        return changeFlag[i];
    }

    /** Serializes only the date property (presence flag + millis). */
    public byte[] getDateAsBytes() throws IOException {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(os);
        if (_date == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeLong(_date.getTime());
        }
        out.flush();
        out.close();
        return os.toByteArray();
    }

    /** Serializes only the count property. */
    public byte[] getCountAsBytes() throws IOException {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(os);
        out.writeInt(_count);
        out.flush();
        out.close();
        return os.toByteArray();
    }

    /** Serializes only the name property (presence flag + Text string). */
    public byte[] getNameAsBytes() throws IOException {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(os);
        if (_name == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            Text.writeString(out, _name);
        }
        out.flush();
        out.close();
        return os.toByteArray();
    }

    /** Restores the date property from {@link #getDateAsBytes()} output. */
    public void setDateFromBytes(byte[] b) throws IOException {
        ByteArrayInputStream is = new ByteArrayInputStream(b);
        DataInput in = new DataInputStream(is);
        // FIX: removed unused local 'int len' (also in the two methods below)
        if (in.readBoolean()) {
            _date = new Date(in.readLong());
            changeFlag[PROPERTY_DATE] = true;
        } else {
            _date = null;
            changeFlag[PROPERTY_DATE] = false;
        }
    }

    /** Restores the count property from {@link #getCountAsBytes()} output. */
    public void setCountFromBytes(byte[] b) throws IOException {
        ByteArrayInputStream is = new ByteArrayInputStream(b);
        DataInput in = new DataInputStream(is);
        _count = in.readInt();
        changeFlag[PROPERTY_COUNT] = true;
    }

    /** Restores the name property from {@link #getNameAsBytes()} output. */
    public void setNameFromBytes(byte[] b) throws IOException {
        ByteArrayInputStream is = new ByteArrayInputStream(b);
        DataInput in = new DataInputStream(is);
        if (in.readBoolean()) {
            _name = Text.readString(in);
            changeFlag[PROPERTY_NAME] = true;
        } else {
            _name = null;
            changeFlag[PROPERTY_NAME] = false;
        }
    }

    /**
     * Converts to a Pig tuple (date as epoch-millis Long, count, name).
     * FIX: deprecated new Long(...)/new Integer(...) replaced with
     * valueOf; null markers keep the explicit casts for overload clarity.
     */
    public Tuple asTuple() throws ExecException {
        Tuple tuple = TupleFactory.getInstance().newTuple(3);
        if (getDate() == null) {
            tuple.set(0, (Long) null);
        } else {
            tuple.set(0, Long.valueOf(getDate().getTime()));
        }
        tuple.set(1, Integer.valueOf(getCount()));
        if (getName() == null) {
            tuple.set(2, (String) null);
        } else {
            tuple.set(2, getName());
        }
        return tuple;
    }

    /** Pig schema matching {@link #asTuple()}: (date:long, count:int, name:chararray). */
    public static ResourceSchema getPigSchema() throws IOException {
        // FIX: removed unused bagSchema/bagField locals
        ResourceSchema schema = new ResourceSchema();
        ResourceFieldSchema fieldSchema[] = new ResourceFieldSchema[3];
        fieldSchema[0] = new ResourceFieldSchema();
        fieldSchema[0].setName("date");
        fieldSchema[0].setType(DataType.LONG);
        fieldSchema[1] = new ResourceFieldSchema();
        fieldSchema[1].setName("count");
        fieldSchema[1].setType(DataType.INTEGER);
        fieldSchema[2] = new ResourceFieldSchema();
        fieldSchema[2].setName("name");
        fieldSchema[2].setType(DataType.CHARARRAY);
        schema.setFields(fieldSchema);
        return schema;
    }

    /** Parses a JSON string; returns null when the string is not valid JSON. */
    public static MyCustomWritable fromJson(String source) {
        MyCustomWritable obj = null;
        try {
            JSONObject jsonObj = new JSONObject(source);
            obj = fromJson(jsonObj);
        } catch (JSONException e) {
            System.out.println(e.toString());
        }
        return obj;
    }

    /** Builds an instance from a JSON object; returns null on type mismatch. */
    public static MyCustomWritable fromJson(JSONObject jsonObj) {
        MyCustomWritable obj = new MyCustomWritable();
        try {
            if (jsonObj.has("date")) {
                obj.setDate(new Date(jsonObj.getLong("date")));
            }
            if (jsonObj.has("count")) {
                obj.setCount(jsonObj.getInt("count"));
            }
            if (jsonObj.has("name")) {
                obj.setName(jsonObj.getString("name"));
            }
        } catch (JSONException e) {
            System.out.println(e.toString());
            obj = null;
        }
        return obj;
    }

    /**
     * Converts to a JSON object; null fields are omitted. Returns null if
     * serialization fails (JSONException with valid keys should not occur).
     */
    public JSONObject toJson() {
        try {
            JSONObject jsonObj = new JSONObject();
            // FIX: removed unused 'JSONArray jsonArray' local
            if (getDate() != null) {
                jsonObj.put("date", getDate().getTime());
            }
            jsonObj.put("count", getCount());
            if (getName() != null) {
                jsonObj.put("name", getName());
            }
            return jsonObj;
        } catch (JSONException e) {
            return null;
        }
    }

    /**
     * JSON string form of this record.
     * FIX: no longer NPEs when toJson() returns null.
     */
    public String toJsonString() {
        JSONObject json = toJson();
        return json == null ? null : json.toString();
    }
}