I'm trying to choose a parser depending on the URI. I was debugging it: when the condition matches, my parserParent is set, but by the end of the setParser() method parserParent is null again. I also tried combining this with an @Autowired annotation in the subclass, but I always get the same NullPointerException.
How do I fix it?
CLASS WHERE THE PROBLEM IS
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import pl.manciak.excelparser.ParseAndSave.ParseCsvAndSaveToDB;
import pl.manciak.excelparser.ParseAndSave.ParseXlsxAndSaveToDb;
import pl.manciak.excelparser.ParseAndSave.ParserParent;
import java.io.IOException;
@RestController
public class RestClientSave {
private ParserParent parserParent;
private ParseCsvAndSaveToDB parseCsvAndSaveToDB;
private ParseXlsxAndSaveToDb parseXlsxAndSaveToDb;
private String whichParser= "csv"; //HARDCODED FOR SIMPLICITY
@Autowired
public void setParser( ParseCsvAndSaveToDB parseCsvAndSaveToDB,
ParseXlsxAndSaveToDb parseXlsxAndSaveToDb) {
if (whichParser.equals("csv")) {
parserParent = parseCsvAndSaveToDB; // HERE THE PARSER IS SET
} else if (whichParser.equals("xlsx")) {
this.parserParent = parseXlsxAndSaveToDb;
}
}
@GetMapping("/save/{whichParser}")
public String save(@PathVariable String whichParser) throws IOException {
this.whichParser = whichParser;
setParser(parseCsvAndSaveToDB, parseXlsxAndSaveToDb); // HERE IT IS NULL AGAIN
parserParent.save();
return "data saved";
}
}
PARENT CLASS FOR PARSER
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import pl.manciak.excelparser.DataService;
import pl.manciak.excelparser.Entity.LinesEntity;
import pl.manciak.excelparser.Entity.MapEntity;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
@Service
public class ParserParent {
protected DataService dataService;
protected LinesEntity linesEntity;
protected ArrayList<String> list;
protected HashMap<Long, LinesEntity> xlsMapped = new HashMap<>();
protected MapEntity mapEntity = new MapEntity();
@Autowired
public ParserParent(DataService dataService ) {
this.dataService = dataService;
}
public void save() throws IOException {}
}
CHILD CLASS
package pl.manciak.excelparser.ParseAndSave;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import pl.manciak.excelparser.DataService;
import pl.manciak.excelparser.Entity.LinesEntity;
import java.io.*;
import java.util.ArrayList;
import java.util.Collections;
@Service
public class ParseCsvAndSaveToDB extends ParserParent{
@Autowired
public ParseCsvAndSaveToDB(DataService dataService) {
super(dataService);
}
public void save() throws IOException {
//Stream to Read Csv file
FileReader fileReader = new FileReader("usda_sample.csv");
BufferedReader br = new BufferedReader(fileReader);
//read first line
String line = br.readLine();
long mapKey = 0;
while (line != null) {
linesEntity = new LinesEntity(); // create a new LinesEntity for this loop execution
list = new ArrayList<>();
Collections.addAll(list, line.split(","));
line = br.readLine();
linesEntity.setSingleLine(new ArrayList<>(list));
dataService.saveOne(linesEntity);
xlsMapped.put(mapKey, linesEntity);
mapKey++;
}
mapEntity.setMapa(xlsMapped);
System.out.println(xlsMapped);
dataService.save(mapEntity);
}
}
Problem solved! :D
The @Autowired setter runs only once, at container startup, while whichParser is still the hard-coded "csv"; the controller fields parseCsvAndSaveToDB and parseXlsxAndSaveToDb were never injected, so calling setParser() again from save() passed nulls. Injecting the parsers through the constructor fixes it.
Here is the code:
@RestController
public class RestClientSave {
ParseCsvAndSaveToDB parseCsvAndSaveToDB;
ParseXlsxAndSaveToDb parseXlsxAndSaveToDb;
@Autowired
public RestClientSave(ParseCsvAndSaveToDB parseCsvAndSaveToDB, ParseXlsxAndSaveToDb parseXlsxAndSaveToDb) {
this.parseCsvAndSaveToDB = parseCsvAndSaveToDB;
this.parseXlsxAndSaveToDb = parseXlsxAndSaveToDb;
}
public ParserParent setParser(ParseCsvAndSaveToDB parseCsvAndSaveToDB,
ParseXlsxAndSaveToDb parseXlsxAndSaveToDb,
String whichParser) {
if (whichParser.equals("csv")) {
return parseCsvAndSaveToDB;
} else if (whichParser.equals("xlsx")) {
return parseXlsxAndSaveToDb;
}
return null;
}
@GetMapping("/save/{whichParser}")
public String save(@PathVariable String whichParser) throws IOException {
ParserParent parent = setParser( parseCsvAndSaveToDB, parseXlsxAndSaveToDb, whichParser);
parent.save();
return "data saved";
}
}
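For reference, a common Spring idiom for this kind of selection is to inject all ParserParent beans as a Map keyed by bean name, so no mutable whichParser field is needed at all. A minimal sketch, assuming the default bean names derived from the class names:

@RestController
public class RestClientSave {

    // Spring injects every ParserParent bean, keyed by its bean name
    private final Map<String, ParserParent> parsers;

    @Autowired
    public RestClientSave(Map<String, ParserParent> parsers) {
        this.parsers = parsers;
    }

    @GetMapping("/save/{whichParser}")
    public String save(@PathVariable String whichParser) throws IOException {
        // e.g. "parseCsvAndSaveToDB" or "parseXlsxAndSaveToDb" (default bean names)
        ParserParent parser = parsers.get(whichParser);
        if (parser == null) {
            return "unknown parser: " + whichParser;
        }
        parser.save();
        return "data saved";
    }
}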
I'm quite new to working with Azure Blob Storage, but I managed to get it working in Java. We have some XML files saved there and collect the file list as strings. Now I've tried to create a unit test to verify it keeps working, and since the getFiles() function is very small, I expected it to be very simple to test.
@Override
public List<String> getFiles(ExecutionContext context) {
return StreamSupport.stream(blobContainerClient.listBlobs().spliterator(), true)
.map(BlobItem::getName)
.collect(Collectors.toList());
}
I can mock com.azure.storage.blob.BlobContainerClient and its listBlobs function, but when trying to create the PagedIterable from a simple List I cannot make it fit the right data types, or it runs into an endless loop.
Since the functionality is so minimal, we would normally just skip testing this, but out of curiosity I'd like to know whether it can be tested, and what is wrong with my code:
import com.azure.core.http.rest.*;
import com.azure.core.util.IterableStream;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.models.BlobItem;
import com.microsoft.azure.functions.ExecutionContext;
import lombok.SneakyThrows;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import reactor.core.CoreSubscriber;
import reactor.core.Fuseable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.function.Supplier;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.*;
class BlobstoreConnectorListFilesTest {
private final BlobContainerClient blobContainerClientMock = mock(BlobContainerClient.class);
private final ExecutionContext context = mock(ExecutionContext.class);
private final String id1 = UUID.randomUUID().toString();
private final String id2 = UUID.randomUUID().toString();
@BeforeEach
void setUp() {
BlobItem item1 = mock(BlobItem.class);
when(item1.getName()).thenReturn(id1 + ".xml");
BlobItem item2 = mock(BlobItem.class);
when(item2.getName()).thenReturn(id2 + ".xml");
List<BlobItem> arrayList = new ArrayList<>();
arrayList.add(item1);
arrayList.add(item2);
Mono<PagedResponse<BlobItem>> monoSource = new Mono<>() {
private final Page<BlobItem> page = new Page<>() {
@Override
public IterableStream<BlobItem> getElements() {
return new IterableStream<>(Flux.fromIterable(arrayList));
}
@Override
public String getContinuationToken() {
return null;
}
};
final PagedResponseBase<String, BlobItem> pagedResponseBase = new PagedResponseBase<>(null, 200, null, page
, null);
final Fuseable.QueueSubscription<BlobItem> fuseableQueueSubscription = new Fuseable.QueueSubscription<>() {
@Override
public void request(long l) {
}
@SneakyThrows
@Override
public void cancel() {
throw new InterruptedException();
}
@Override
public int size() {
return arrayList.size();
}
@Override
public boolean isEmpty() {
return arrayList.isEmpty();
}
@Override
public void clear() {
arrayList.clear();
}
@Override
public BlobItem poll() {
var value = arrayList.stream().findFirst().orElse(null);
if(value!=null){
arrayList.remove(value);
}
return value;
}
@Override
public int requestFusion(int i) {
return 0;
}
};
@Override
public void subscribe(CoreSubscriber<? super PagedResponse<BlobItem>> coreSubscriber) {
coreSubscriber.onNext(pagedResponseBase);
coreSubscriber.onSubscribe(fuseableQueueSubscription);
}
};
Supplier<Mono<PagedResponse<BlobItem>>> blobItemSupplier = () -> monoSource;
PagedFlux<BlobItem> pagedFlux = new PagedFlux<>(blobItemSupplier);
PagedIterable<BlobItem> leaflets = new PagedIterable<>(pagedFlux);
doReturn(leaflets).when(blobContainerClientMock).listBlobs();
}
@Test
void getAllFiles() {
BlobstoreConnector connector = new BlobstoreConnector(blobContainerClientMock);
List<String> actual = connector.getFiles(context);
assertEquals(2, actual.size());
assertTrue(actual.stream().anyMatch(fileName -> fileName.equals(id1 + ".xml")));
assertTrue(actual.stream().anyMatch(fileName -> fileName.equals(id2 + ".xml")));
}
}
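For what it's worth, the hand-rolled Mono is the likely source of the endless loop: its subscribe() calls onNext before onSubscribe, which violates the Reactive Streams protocol. A much simpler setUp() can be built from Mono.just; a sketch under the same mocks (item1 and item2 as above):

List<BlobItem> items = List.of(item1, item2);
PagedResponse<BlobItem> page = new PagedResponseBase<>(
        null,   // request: not needed by the code under test
        200,    // status code
        null,   // headers
        items,  // the blobs for this single page
        null,   // continuation token: null means no further pages
        null);  // deserialized headers
PagedFlux<BlobItem> pagedFlux = new PagedFlux<>(() -> Mono.just(page));
doReturn(new PagedIterable<>(pagedFlux)).when(blobContainerClientMock).listBlobs();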
I am trying to build a Kafka source connector for audio files. From my understanding, I have to read the audio files as a byte array.
I am using the confluent-quick-start project as a skeleton for development. The connector is not working, and I can't tell why, because I don't know how to make it print error logs. I need help making this work; I am not an expert in Java, as you can probably tell from the code.
Is my approach correct? And do I have to change anything in the pom.xml file, or just leave it as is?
I have examined previously available projects and tried to apply the concept to audio files. The following are the classes:
AudioSourceConnectorConfig
package org.othman.example;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigDef.Importance;
import java.util.Map;
public class AudioSourceConnectorConfig extends AbstractConfig {
public static final String FILENAME_CONFIG="fileName";
private static final String FILENAME_DOC ="Enter the name of the audio file";
public static final String TOPIC_CONFIG = "topic";
private static final String TOPIC_DOC = "Enter the topic to write to..";
public AudioSourceConnectorConfig(ConfigDef config, Map<String, String> parsedConfig) {
super(config, parsedConfig);
}
public AudioSourceConnectorConfig(Map<String, String> parsedConfig) {
this(conf(), parsedConfig);
}
public static ConfigDef conf() {
return new ConfigDef()
.define(FILENAME_CONFIG, Type.STRING, Importance.HIGH, FILENAME_DOC)
.define(TOPIC_CONFIG, Type.STRING, Importance.HIGH, TOPIC_DOC);
}
public String getFilenameConfig(){
return this.getString("fileName");
}
public String getTopicConfig(){
return this.getString("topic");
}
}
AudioSourceConnector
package org.othman.example;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.Task;
import org.apache.kafka.connect.source.SourceConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AudioSourceConnector extends SourceConnector {
private static Logger log = LoggerFactory.getLogger(AudioSourceConnector.class);
private AudioSourceConnectorConfig config;
private Map<String, String> configProps;
@Override
public String version() {
return VersionUtil.getVersion();
}
@Override
public void start(Map<String, String> map) {
//config = new AudioSourceConnectorConfig(map);
this.configProps = new HashMap<>(map);
//TODO: Add things you need to do to setup your connector.
}
@Override
public Class<? extends Task> taskClass() {
//TODO: Return your task implementation.
return AudioSourceTask.class;
}
@Override
public List<Map<String, String>> taskConfigs(int maxTasks) {
return IntStream.range(0, maxTasks)
.mapToObj(i -> (Map<String, String>) new HashMap<>(this.configProps))
.collect(Collectors.toList());
//TODO: Define the individual task configurations that will be executed.
}
@Override
public void stop() {
//TODO: Do things that are necessary to stop your connector.
this.configProps=null;
}
@Override
public ConfigDef config() {
return AudioSourceConnectorConfig.conf();
}
}
AudioSourceTask
package org.othman.example;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.source.SourceTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class AudioSourceTask extends SourceTask {
static final Logger log = LoggerFactory.getLogger(AudioSourceTask.class);
AudioSourceConnectorConfig config;
private Process inputProcess;
byte [] audioFile;
@Override
public String version() {
return VersionUtil.getVersion();
}
@Override
public void start(Map<String, String> map) {
//TODO: Do things here that are required to start your task.
// This could be open a connection to a database, etc.
this.config = new AudioSourceConnectorConfig(map);
try{
audioFile = Files.readAllBytes(Paths.get(this.config.getFilenameConfig()));
// this.inputProcess = (new ProcessBuilder((new String[]{this.config.getFilenameConfig()}))).redirectError().start();
}
catch(IOException e){
System.out.println("ERROR WHILE TRYING TO READ AUDIO FILE...");
e.printStackTrace();
}
}
@Override
public List<SourceRecord> poll() throws InterruptedException {
//TODO: Create SourceRecord objects that will be sent the kafka cluster.
final ArrayList<SourceRecord> records = new ArrayList<>();
SourceRecord record;
for (int i=0;i < audioFile.length - 1;i++) {
record= new SourceRecord(null, null, this.config.getTopicConfig(), 0, Schema.BYTES_SCHEMA, audioFile[i]);
records.add(record);
}
return records;
}
@Override
public void stop() {
//TODO: Do whatever is required to stop your task.
}
}
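Two things in poll() stand out: the loop drops the last byte (the condition is i < audioFile.length - 1), and it pairs Schema.BYTES_SCHEMA with a single Byte value, while that schema expects a whole byte[]. Also, Connect calls poll() in a loop, so the file would be re-sent forever. A minimal sketch of a poll() that emits the complete file once (the done flag is an addition of mine, not part of the original code):

private boolean done = false;

@Override
public List<SourceRecord> poll() throws InterruptedException {
    if (done || audioFile == null) {
        Thread.sleep(1000);  // don't busy-loop; Connect will call poll() again
        return null;         // null means "no records right now"
    }
    done = true;
    // one record carrying the whole file as a byte[], matching BYTES_SCHEMA
    SourceRecord record = new SourceRecord(
            null, null,                       // no source partition/offset tracking in this sketch
            this.config.getTopicConfig(), 0,  // target topic and partition
            Schema.BYTES_SCHEMA, audioFile);
    return Collections.singletonList(record);
}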
I am writing data to Accumulo storage natively using the GeoMesa native client. Here is my Java code:
package org.locationtech.geomesa.api;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.security.Authorizations;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.feature.AttributeTypeBuilder;
import org.geotools.geometry.jts.JTSFactoryFinder;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore;
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex;
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex$;
import org.locationtech.geomesa.utils.index.IndexMode$;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.filter.FilterFactory2;
import javax.annotation.Nullable;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
public class WorkerBeta {
public static void main(String[] args){
try {
DomainObjectValueSerializer dovs = new DomainObjectValueSerializer();
final GeoMesaIndex<DomainObject> index = AccumuloGeoMesaIndex.buildWithView(
"aj_v14",
"localhost:2181",
"hps",
"root", "9869547580",
false,
dovs,
new SimpleFeatureView<DomainObject>() {
AttributeTypeBuilder atb = new AttributeTypeBuilder();
private List<AttributeDescriptor> attributeDescriptors =
Lists.newArrayList(atb.binding(Integer.class).buildDescriptor("rId")
, atb.binding(String.class).buildDescriptor("dId")
, atb.binding(Integer.class).buildDescriptor("s")
, atb.binding(Integer.class).buildDescriptor("a")
, atb.binding(Integer.class).buildDescriptor("e")
);
@Override
public void populate(SimpleFeature f, DomainObject domainObject, String id, byte[] payload, Geometry geom, Date dtg) {
f.setAttribute("rId", domainObject.rideId);
f.setAttribute("dId", domainObject.deviceId);
f.setAttribute("s", domainObject.speed);
f.setAttribute("a", domainObject.angle);
f.setAttribute("e", domainObject.error);
}
@Override
public List<AttributeDescriptor> getExtraAttributes() {
return attributeDescriptors;
}
}
);
//Inserting
final DomainObject one = new DomainObject(1, "AJJASsP", 12, 40, 1);
final GeometryFactory gf = JTSFactoryFinder.getGeometryFactory();
System.out.println(index.insert(
one,
gf.createPoint(new Coordinate(-74.0, 34.0)),
date("2017-03-31T01:15:00.000Z")
));
//Read
GeoMesaQuery q = GeoMesaQuery.GeoMesaQueryBuilder.builder()
.within(-90.0, -180, 90, 180)
.during(date("2017-01-01T00:00:00.000Z"), date("2017-04-01T00:00:00.000Z"))
.build();
Iterable<DomainObject> results = index.query(q);
int counter = 0;
for(DomainObject dm : results){
counter += 1;
System.out.println("result counter: " + counter);
dovs.toBytes(dm);
}
index.close(); // index is only in scope inside the try block
}
catch (Exception ex){
ex.printStackTrace();
}
}
public static class DomainObject {
public final int rideId;
public final String deviceId;
public final int angle;
public final int speed;
public final int error;
public DomainObject(int rideId, String deviceId, int angle, int speed, int error) {
this.rideId = rideId;
this.deviceId = deviceId;
this.angle = angle;
this.speed = speed;
this.error = error;
}
}
public static class DomainObjectValueSerializer implements ValueSerializer<DomainObject> {
public static final Gson gson = new Gson();
@Override
public byte[] toBytes(DomainObject o) {
return gson.toJson(o).getBytes();
}
@Override
public DomainObject fromBytes(byte[] bytes) {
return gson.fromJson(new String(bytes), DomainObject.class);
}
}
public static Date date(String s) {
return Date.from(ZonedDateTime.parse(s).toInstant());
}
}
The problem with this code is that I need to create the index object for every new insert request and call index.close() for the insert to be reflected, but I can't execute insert() again once index.close() has been called. I will be accepting insert requests from a queue at a very high rate, and I don't want to create the index object every time. How can I do that?
In short, how can I flush writes without calling close()?
I created a GeomesaClient class to use GeoMesa natively. Below is a partial implementation, which shows how you can flush with an AccumuloAppendFeatureWriter without calling close().
public class GeomesaClient {
private AccumuloDataStore ds = null;
private AccumuloAppendFeatureWriter fw = null;
private SimpleFeatureSource sfs = null;
private String tableName = "";
private FeatureStore fst = null;
private SimpleFeatureType sft;
public GeomesaClient(Map<String, String> dsConf) throws Exception {
this.ds = (AccumuloDataStore) DataStoreFinder.getDataStore(dsConf);
this.tableName = dsConf.get("tableName");
sft = createFeatureType();
if(!Arrays.asList(this.ds.getTypeNames()).contains(sft.getTypeName())){
ds.createSchema(sft);
}
this.sfs = ds.getFeatureSource(sft.getTypeName());
this.fst = (FeatureStore) sfs; // sfs must be initialised before this cast
this.fw = (AccumuloAppendFeatureWriter) this.ds.getFeatureWriterAppend(sft.getTypeName(),
Transaction.AUTO_COMMIT);
}
/*
Flush with AccumuloAppendFeatureWriter
*/
public void flush(boolean force) {
fw.flush();
}
}
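For completeness, a write method on this class might look like the following, using the standard GeoTools FeatureWriter cycle of next/populate/write; the attribute copying is an assumption about the caller's features, not part of the posted code:

// Sketch: append one feature; callers can then invoke flush(true) to push
// the pending mutations to Accumulo without closing the writer.
public void write(SimpleFeature feature) throws IOException {
    SimpleFeature toWrite = fw.next();              // new, empty feature to populate
    toWrite.setAttributes(feature.getAttributes()); // copy attribute values across
    fw.write();                                     // stage the append
}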
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.powerbot.core.script.job.Task;
import org.powerbot.core.script.job.state.Node;
import org.powerbot.core.script.job.state.Tree;
import org.powerbot.game.api.util.Timer;
public abstract class ScriptWrapper extends org.powerbot.core.script.ActiveScript {
public final Timer runTime;
private Tree jobContainer = null;
private List<Node> jobs;
public ScriptWrapper() {
runTime = new Timer(0);
jobs = Collections.synchronizedList(new ArrayList<Node>());
}
public final void provide(final Node job) {
if(!jobs.contains(job)) {
jobs.add(job);
jobContainer = new Tree(jobs.toArray(new Node[jobs.size()]));
}
}
public final void revoke(final Node job) {
if(jobs.contains(job)) {
jobs.remove(job);
jobContainer = new Tree(jobs.toArray(new Node[jobs.size()]));
}
}
public final void submit(final Task task) {
getContainer().submit(task);
}
public final ScriptWrapper getScriptWrapper() {
return this;
}
public abstract void onStart();
@Override
public int loop() {
if (jobContainer != null) {
final Node job = jobContainer.state();
if (job != null) {
jobContainer.set(job);
getContainer().submit(job);
job.join();
}
}
return 0;
}
}
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Point;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.regex.Pattern;
import org.powerbot.core.event.listeners.PaintListener;
import org.powerbot.core.script.job.state.Node;
import org.powerbot.game.api.Manifest;
import org.powerbot.game.api.methods.Widgets;
import org.powerbot.game.api.methods.input.Mouse;
import org.powerbot.game.api.methods.interactive.Players;
import org.powerbot.game.api.methods.node.GroundItems;
import org.powerbot.game.api.methods.node.Menu;
import org.powerbot.game.api.util.Filter;
import org.powerbot.game.api.util.Random;
import org.powerbot.game.api.util.Timer;
import org.powerbot.game.api.util.node.Deque;
import org.powerbot.game.api.util.node.Queue;
import org.powerbot.game.api.wrappers.Tile;
import org.powerbot.game.api.wrappers.interactive.Player;
import org.powerbot.game.api.wrappers.node.GroundItem;
import org.powerbot.game.bot.Context;
import org.powerbot.game.client.Client;
import org.powerbot.game.client.MenuGroupNode;
import org.powerbot.game.client.MenuItemNode;
import org.powerbot.game.client.NodeDeque;
import org.powerbot.game.client.NodeSubQueue;
@Manifest(authors = { "Cup" }, name = "c[DropParty]")
public class DropParty extends ScriptWrapper implements PaintListener {
private ArrayList<Tile> locations = new ArrayList<Tile>();
private ArrayList<Long> times = new ArrayList<Long>();
private Timer timer = new Timer(0);
private Player player;
@Override
public void onStart() {
provide(new VexillumCloser());
provide(new StatueCloser());
provide(new Walk());
provide(new Pickup());
provide(new Tracking());
}
private class VexillumCloser extends Node {
@Override
public boolean activate() {
return Widgets.get(1107, 156).validate();
}
@Override
public void execute() {
Mouse.hop((int) Widgets.get(1107, 156).getBoundingRectangle().getCenterX(), (int) Widgets.get(1107, 156).getBoundingRectangle().getCenterY());
Mouse.click(true);
}
}
private class StatueCloser extends Node {
@Override
public boolean activate() {
return Widgets.get(21, 42).validate();
}
@Override
public void execute() {
Mouse.hop((int) Widgets.get(21, 42).getBoundingRectangle().getCenterX(), (int) Widgets.get(21, 42).getBoundingRectangle().getCenterY());
Mouse.click(true);
}
}
private class Walk extends Node {
@Override
public boolean activate() {
return Menu.isOpen() && Menu.contains("Walk");
}
@Override
public void execute() {
if (Menu.isOpen()) {
select(Menu.contains("Take") ? "Take" : "Walk");
}
}
}
The error starts at line 63:
}
import java.awt.Color;
I've been trying to fix this code, which a friend sent me, but I just can't fix this error, probably because I am a beginner. Please help, thanks!
In Java, each public top-level class must be in its own file, named after the class. The compiler doesn't expect to see any imports (or really anything else) after the end of the ScriptWrapper class definition.
Put DropParty in its own DropParty.java file.
You need to put each class into its own file unless you nest it as an inner class. You have multiple public classes in the same file, so the compiler doesn't expect to see anything after the closing bracket of the first class definition.
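A minimal sketch of the layout both answers describe (each file name must match its public class):

// ScriptWrapper.java -- only ScriptWrapper and its own imports live here
public abstract class ScriptWrapper extends org.powerbot.core.script.ActiveScript {
    // ... body unchanged ...
}

// DropParty.java -- a separate file with its own import statements
@Manifest(authors = { "Cup" }, name = "c[DropParty]")
public class DropParty extends ScriptWrapper implements PaintListener {
    // ... body unchanged ...
}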
I need to set up JMock code to test a callback with Google protobuf.
Full project is located at http://github.com/andrewmilkowski/template-transport
In short, the method signatures are below.
What I need to do is test the getLongValue method, using JMock's JUnit4Mockery.
What is the best and cleanest way to go about this?
Thanks much!
package com.argmaps.client.proto;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.fepss.rpc.server.RpcApplication;
import com.fepss.rpc.client.RpcChannelImpl;
import org.apache.tapestry5.ioc.MappedConfiguration;
import com.google.protobuf.RpcController;
import com.google.protobuf.RpcCallback;
import com.argmaps.transport.proto.generated.TransportServer.ProtoService;
import com.argmaps.transport.proto.generated.TransportServer.ProtoService.Stub;
import com.argmaps.transport.proto.generated.TransportServer.DefaultLongValue;
import com.argmaps.transport.proto.generated.TransportServer.LongValue;
import com.argmaps.transport.proto.fepss.ProtoServer.TransportHandler;
public class TransportClient {
protected final Log LOG = LogFactory.getLog(this.getClass().getName());
private RpcController controller;
private TransportHandler transportHandler;
private ProtoService.Interface service;
private void open(String host, int port) {
RpcChannelImpl channel = new RpcChannelImpl(host, port);
controller = channel.newRpcController();
service = ProtoService.newStub(channel);
}
protected static class LongValueRpcCallback implements RpcCallback<LongValue> {
private long longValue = 0L;
@Override
public void run(LongValue result) {
longValue = result.getLongValue();
}
private long getLongValue() {
return longValue;
}
}
private void close() {
}
public long getLongValue(LongValueRpcCallback longValueRpcCallback) {
DefaultLongValue defaultLongValue = DefaultLongValue.newBuilder().setLongValue(0L).build();
service.getLongValue(controller, defaultLongValue, longValueRpcCallback);
if (LOG.isDebugEnabled()) {
LOG.debug("Long value from server:" + longValueRpcCallback.getLongValue());
}
return longValueRpcCallback.getLongValue();
}
public static void main(String[] args) {
String host = "localhost";
int port = 9090;
final String portArgKey = "--port=";
for (String cmd : args) {
if (cmd.startsWith(portArgKey)) {
port = Integer.parseInt(cmd.substring(portArgKey.length()));
break;
}
}
TransportClient c = new TransportClient();
c.open(host, port);
c.getLongValue(new LongValueRpcCallback());
c.close();
}
public TransportClient() {
}
public static class TransportModule {
public static void contributeIoHandler(MappedConfiguration<String, ProtoService> configuration) {
configuration.add(ProtoService.getDescriptor().getFullName(), new TransportHandler());
}
}
}
Because of the callback, I needed to:
create an abstract class LongValueRpcCallbackTemplate implements RpcCallback<LongValue>
create a class LongValueRpcCallback extends LongValueRpcCallbackTemplate
and then complete the implementation in the test class, as sketched below
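The template itself isn't shown in the answer; a minimal sketch of the shape it implies (the field handling is an assumption of mine):

public abstract class LongValueRpcCallbackTemplate implements RpcCallback<LongValue> {
    protected long longValue = 0L;

    @Override
    public void run(LongValue result) {
        // record the server's reply; subclasses decide what to expose
        longValue = result.getLongValue();
    }

    protected abstract long getLongValue();
}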
Test class:
package com.argmaps.client.proto;
import com.argmaps.transport.proto.generated.TransportServer;
import com.fepss.rpc.client.RpcChannelImpl;
import com.google.protobuf.RpcController;
import org.jmock.Expectations;
import org.junit.Test;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JUnit4Mockery;
import static org.junit.Assert.assertEquals;
public class TransportClientTest {
Mockery context;
@Before
public void before() {
context = new JUnit4Mockery();
}
private class TestLongValueRpcCallback extends LongValueRpcCallbackTemplate {
private long longValue = 123456789L;
@Override
protected long getLongValue() {
return longValue;
}
}
@Test
public void testGetLongValue() {
final TransportServer.ProtoService.Interface mockedTransportServer = context.mock(TransportServer.ProtoService.Interface.class);
final RpcChannelImpl channel = new RpcChannelImpl("localhost", 9090);
final RpcController controller = channel.newRpcController();
final TransportServer.DefaultLongValue defaultLongValue = TransportServer.DefaultLongValue.newBuilder().setLongValue(0L).build();
com.argmaps.client.proto.TransportClient testObject = new TransportClient(controller, mockedTransportServer);
final TestLongValueRpcCallback testLongValueRpcCallback = new TestLongValueRpcCallback();
final long testLongValue = 123456789L;
context.checking(new Expectations() {
{
one(mockedTransportServer).getLongValue(controller, defaultLongValue, testLongValueRpcCallback);
}
});
assertEquals(testLongValue, testObject.getLongValue(testLongValueRpcCallback));
}
}
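Note that the test constructs TransportClient with (controller, service), while the class as originally posted only has a no-argument constructor; the refactor this implies is something like the following sketch:

// Assumed constructor added for testability (not in the original posting):
public TransportClient(RpcController controller, ProtoService.Interface service) {
    this.controller = controller;
    this.service = service;
}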