I need to set up JMock code to test a callback with Google Protobuf.
The full project is located at http://github.com/andrewmilkowski/template-transport
In short, the method signatures are below.
What I need to do is test the method getLongValue, using JMock's JUnit4Mockery.
What is the best and cleanest way to go about this?
Thanks much!
package com.argmaps.client.proto;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.fepss.rpc.server.RpcApplication;
import com.fepss.rpc.client.RpcChannelImpl;
import org.apache.tapestry5.ioc.MappedConfiguration;
import com.google.protobuf.RpcController;
import com.google.protobuf.RpcCallback;
import com.argmaps.transport.proto.generated.TransportServer.ProtoService;
import com.argmaps.transport.proto.generated.TransportServer.ProtoService.Stub;
import com.argmaps.transport.proto.generated.TransportServer.DefaultLongValue;
import com.argmaps.transport.proto.generated.TransportServer.LongValue;
import com.argmaps.transport.proto.fepss.ProtoServer.TransportHandler;
public class TransportClient {
protected final Log LOG = LogFactory.getLog(this.getClass().getName());
private RpcController controller;
private TransportHandler transportHandler;
private ProtoService.Interface service;
private void open(String host, int port) {
RpcChannelImpl channel = new RpcChannelImpl(host, port);
controller = channel.newRpcController();
service = ProtoService.newStub(channel);
}
protected static class LongValueRpcCallback implements RpcCallback<LongValue> {
private long longValue = 0L;
@Override
public void run(LongValue result) {
longValue = result.getLongValue();
}
private long getLongValue() {
return longValue;
}
}
private void close() {
}
public long getLongValue(LongValueRpcCallback longValueRpcCallback) {
DefaultLongValue defaultLongValue = DefaultLongValue.newBuilder().setLongValue(0L).build();
service.getLongValue(controller, defaultLongValue, longValueRpcCallback);
if (LOG.isDebugEnabled()) {
LOG.debug("Long value from server:" + longValueRpcCallback.getLongValue());
}
return longValueRpcCallback.getLongValue();
}
public static void main(String[] args) {
String host = "localhost";
int port = 9090;
final String portArgKey = "--port=";
for (String cmd : args) {
if (cmd.startsWith(portArgKey)) {
port = Integer.parseInt(cmd.substring(portArgKey.length()));
break;
}
}
TransportClient c = new TransportClient();
c.open(host, port);
c.getLongValue(new LongValueRpcCallback());
c.close();
}
public TransportClient() {
}
public static class TransportModule {
public static void contributeIoHandler(MappedConfiguration<String, ProtoService> configuration) {
configuration.add(ProtoService.getDescriptor().getFullName(), new TransportHandler());
}
}
}
Because of the callback, I needed to:
create an abstract class LongValueRpcCallbackTemplate implements RpcCallback<LongValue>
create a class LongValueRpcCallback extends LongValueRpcCallbackTemplate
and then complete the implementation in the test class.
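For reference, the template class could look something like this (a sketch; the actual code lives in the linked project):
public abstract class LongValueRpcCallbackTemplate implements RpcCallback<LongValue> {

    private long longValue = 0L;

    @Override
    public void run(LongValue result) {
        // Store the value returned by the server; subclasses decide how to expose it.
        longValue = result.getLongValue();
    }

    protected long getLongValue() {
        return longValue;
    }
}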
Test class:
package com.argmaps.client.proto;
import com.argmaps.transport.proto.generated.TransportServer;
import com.fepss.rpc.client.RpcChannelImpl;
import com.google.protobuf.RpcController;
import org.jmock.Expectations;
import org.junit.Test;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JUnit4Mockery;
import static org.junit.Assert.assertEquals;
public class TransportClientTest {
Mockery context;
@Before
public void before() {
context = new JUnit4Mockery();
}
private class TestLongValueRpcCallback extends LongValueRpcCallbackTemplate {
private long longValue = 123456789L;
@Override
protected long getLongValue() {
return longValue;
}
}
@Test
public void testGetLongValue() {
final TransportServer.ProtoService.Interface mockedTransportServer = context.mock(TransportServer.ProtoService.Interface.class);
final RpcChannelImpl channel = new RpcChannelImpl("localhost", 9090);
final RpcController controller = channel.newRpcController();
final TransportServer.DefaultLongValue defaultLongValue = TransportServer.DefaultLongValue.newBuilder().setLongValue(0L).build();
com.argmaps.client.proto.TransportClient testObject = new TransportClient(controller, mockedTransportServer);
final TestLongValueRpcCallback testLongValueRpcCallback = new TestLongValueRpcCallback();
final long testLongValue = 123456789L;
context.checking(new Expectations() {
{
one(mockedTransportServer).getLongValue(controller, defaultLongValue, testLongValueRpcCallback);
}
});
assertEquals(testLongValue, testObject.getLongValue(testLongValueRpcCallback));
}
}
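An alternative sketch (not from the original post): instead of subclassing the callback, jMock can complete the RPC itself through a custom action (org.jmock.lib.action.CustomAction together with org.jmock.api.Invocation), assuming LongValue exposes a builder like DefaultLongValue does:
context.checking(new Expectations() {
    {
        one(mockedTransportServer).getLongValue(controller, defaultLongValue, testLongValueRpcCallback);
        will(new CustomAction("invoke callback") {
            @Override
            public Object invoke(Invocation invocation) {
                // Simulate the server answering the RPC by invoking the callback (third parameter).
                TransportServer.LongValue value =
                        TransportServer.LongValue.newBuilder().setLongValue(123456789L).build();
                ((LongValueRpcCallbackTemplate) invocation.getParameter(2)).run(value);
                return null;
            }
        });
    }
});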
While trying to run my test for the following code, I am getting the error detailed below.
I observed that the mkdirs() call in my code throws IllegalArgumentException when I run my test.
MyClass.java
package com.javaeasily.demos.junit;
import java.util.ArrayList;
public class MyClass {
private final NodeHelper nodeHelper;
private static final ArrayList<String> ACTIVE_SERVICES_POST_RECONFIGURE = new ArrayList<>();
// Only allow construction if number is greater than one
MyClass() {
ACTIVE_SERVICES_POST_RECONFIGURE.add("my-node-" + NodeUtils.getMyNode());
nodeHelper = new NodeHelper();
}
public void reconfigureNode() {
if (ACTIVE_SERVICES_POST_RECONFIGURE.isEmpty()) {
return;
}
try {
nodeHelper.createStagingDir();
} catch (Exception e) {
throw new RuntimeException("Cannot reconfigure node");
}
}
}
NodeUtils.java
package com.javaeasily.demos.junit;
import org.apache.commons.lang3.StringUtils;
public class NodeUtils {
private static final String HOSTNAME_PREFIX = "my-node-";
public static String hostnameToNode(String hostname) {
if (!hostname.startsWith(HOSTNAME_PREFIX)) {
throw new IllegalArgumentException(hostname + " is not recognized hostname");
}
return StringUtils.removeStart(hostname, HOSTNAME_PREFIX);
}
public static String getHostname() {
return System.getenv("HOSTNAME");
}
public static String getMyNode() {
return hostnameToNode(getHostname());
}
}
NodeHelper.java
package com.javaeasily.demos.junit;
import java.io.File;
public class NodeHelper {
private static final String STAGING_DIR = "/staging/";
public void createStagingDir() {
File stagingDir = new File(STAGING_DIR);
if (!stagingDir.exists() && !stagingDir.mkdirs()) {
throw new IllegalArgumentException("Could not create staging dir");
}
}
}
MyClassTest.java
package com.javaeasily.demos.junit;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
public class MyClassTest {
private MyClass myclass;
@BeforeEach
public void setUp() {
try (MockedStatic<NodeUtils> nodeUtilsMockedStatic = Mockito.mockStatic(NodeUtils.class)) {
nodeUtilsMockedStatic.when(NodeUtils::getMyNode).thenReturn("foo");
myclass = new MyClass();
}
}
@Test
public void testReconfigureNode() {
myclass.reconfigureNode();
}
}
When I try to run my test I get the following error:
java.lang.RuntimeException: Cannot reconfigure node
at com.javaeasily.demos.junit.MyClass.reconfigureNode(MyClass.java:22)
After debugging, I see that:
java.lang.IllegalArgumentException: Could not create staging dir
Does anyone care to enlighten me as to what I am doing wrong?
To mock NodeHelper you should not create it inside MyClass, but inject it from the outside via the constructor, i.e.:
MyClass(NodeHelper nodeHelper) {
ACTIVE_SERVICES_POST_RECONFIGURE.add("my-node-" + NodeUtils.getMyNode());
this.nodeHelper = nodeHelper;
}
This allows you to create a mocked NodeHelper in your test, pass it to MyClass, and set the desired behavior as expectations, as in the sketch below.
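A sketch of the resulting test, assuming the constructor change above (everything else taken from the question):
package com.javaeasily.demos.junit;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;

@ExtendWith(MockitoExtension.class)
public class MyClassTest {

    // Mockito supplies the mock; its createStagingDir() is a no-op, so nothing touches the filesystem.
    @Mock
    private NodeHelper nodeHelper;

    private MyClass myclass;

    @BeforeEach
    public void setUp() {
        try (MockedStatic<NodeUtils> nodeUtilsMockedStatic = Mockito.mockStatic(NodeUtils.class)) {
            nodeUtilsMockedStatic.when(NodeUtils::getMyNode).thenReturn("foo");
            myclass = new MyClass(nodeHelper);
        }
    }

    @Test
    public void testReconfigureNode() {
        // No IllegalArgumentException: the mocked helper never calls mkdirs().
        myclass.reconfigureNode();
    }
}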
I'm working with Azure Blob Storage and am quite new to this topic, but I managed to get it working in Java. We have some XML files saved there and collect the file list as strings. Now I've tried to create a unit test to verify it keeps working, and since the getFiles() function is very small, I expected it to be very simple to test.
@Override
public List<String> getFiles(ExecutionContext context) {
return StreamSupport.stream(blobContainerClient.listBlobs().spliterator(), true)
.map(BlobItem::getName)
.collect(Collectors.toList());
}
I can mock com.azure.storage.blob.BlobContainerClient and its listBlobs function, but when trying to create the PagedIterable from a simple List I cannot make it fit the right data types, or it runs into an endless loop.
Since the functionality is so minimal, we would normally just skip testing this, but out of curiosity I want to know whether it can be tested, and what is wrong with my code:
import com.azure.core.http.rest.*;
import com.azure.core.util.IterableStream;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.models.BlobItem;
import com.microsoft.azure.functions.ExecutionContext;
import lombok.SneakyThrows;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import reactor.core.CoreSubscriber;
import reactor.core.Fuseable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.function.Supplier;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.*;
class BlobstoreConnectorListFilesTest {
private final BlobContainerClient blobContainerClientMock = mock(BlobContainerClient.class);
private final ExecutionContext context = mock(ExecutionContext.class);
private final String id1 = UUID.randomUUID().toString();
private final String id2 = UUID.randomUUID().toString();
@BeforeEach
void setUp() {
BlobItem item1 = mock(BlobItem.class);
when(item1.getName()).thenReturn(id1 + ".xml");
BlobItem item2 = mock(BlobItem.class);
when(item2.getName()).thenReturn(id2 + ".xml");
List<BlobItem> arrayList = new ArrayList<>();
arrayList.add(item1);
arrayList.add(item2);
Mono<PagedResponse<BlobItem>> monoSource = new Mono<>() {
private final Page<BlobItem> page = new Page<>() {
@Override
public IterableStream<BlobItem> getElements() {
return new IterableStream<>(Flux.fromIterable(arrayList));
}
@Override
public String getContinuationToken() {
return null;
}
};
final PagedResponseBase<String, BlobItem> pagedResponseBase = new PagedResponseBase<>(null, 200, null, page, null);
final Fuseable.QueueSubscription<BlobItem> fuseableQueueSubscription = new Fuseable.QueueSubscription<>() {
@Override
public void request(long l) {
}
@SneakyThrows
@Override
public void cancel() {
throw new InterruptedException();
}
@Override
public int size() {
return arrayList.size();
}
@Override
public boolean isEmpty() {
return arrayList.isEmpty();
}
@Override
public void clear() {
arrayList.clear();
}
@Override
public BlobItem poll() {
var value = arrayList.stream().findFirst().orElse(null);
if(value!=null){
arrayList.remove(value);
}
return value;
}
@Override
public int requestFusion(int i) {
return 0;
}
};
@Override
public void subscribe(CoreSubscriber<? super PagedResponse<BlobItem>> coreSubscriber) {
coreSubscriber.onNext(pagedResponseBase);
coreSubscriber.onSubscribe(fuseableQueueSubscription);
}
};
Supplier<Mono<PagedResponse<BlobItem>>> blobItemSupplier = () -> monoSource;
PagedFlux<BlobItem> pagedFlux = new PagedFlux<>(blobItemSupplier);
PagedIterable<BlobItem> leaflets = new PagedIterable<>(pagedFlux);
doReturn(leaflets).when(blobContainerClientMock).listBlobs();
}
@Test
void getAllFiles() {
BlobstoreConnector connector = new BlobstoreConnector(blobContainerClientMock);
List<String> actual = connector.getFiles(context);
assertEquals(2, actual.size());
assertTrue(actual.stream().anyMatch(fileName -> fileName.equals(id1 + ".xml")));
assertTrue(actual.stream().anyMatch(fileName -> fileName.equals(id2 + ".xml")));
}
}
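For what it's worth, the endless loop is likely caused by the hand-rolled Mono: it calls onNext before onSubscribe, which violates the Reactive Streams contract. A much shorter setup appears possible (an untested sketch, using only the types already imported above): build the page and response as before, but let Mono.just(...) drive the PagedFlux:
Page<BlobItem> page = new Page<>() {
    @Override
    public IterableStream<BlobItem> getElements() {
        return new IterableStream<>(Flux.fromIterable(arrayList));
    }

    @Override
    public String getContinuationToken() {
        return null; // a single page, so no continuation token
    }
};
PagedResponse<BlobItem> response = new PagedResponseBase<>(null, 200, null, page, null);
// Mono.just(...) honors the subscription contract, so PagedIterable can drain it normally.
PagedFlux<BlobItem> pagedFlux = new PagedFlux<>(() -> Mono.just(response));
doReturn(new PagedIterable<>(pagedFlux)).when(blobContainerClientMock).listBlobs();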
I am trying to build a Kafka source connector for audio files. From my understanding, I have to read the audio files as a byte array.
I am using the confluent-quick-start project as a skeleton for development. The connector is not working, and I can't tell why, because I don't know how to make it print error logs. I need help to make this work; I am not an expert in Java, as you can probably tell by the code.
Is my approach correct? And do I have to do anything to the pom.xml file, or just leave it as is?
I have examined previously available projects and tried to apply the concept to audio files. The following are the classes:
AudioSourceConnectorConfig
package org.othman.example;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigDef.Importance;
import java.util.Map;
public class AudioSourceConnectorConfig extends AbstractConfig {
public static final String FILENAME_CONFIG="fileName";
private static final String FILENAME_DOC ="Enter the name of the audio file";
public static final String TOPIC_CONFIG = "topic";
private static final String TOPIC_DOC = "Enter the topic to write to..";
public AudioSourceConnectorConfig(ConfigDef config, Map<String, String> parsedConfig) {
super(config, parsedConfig);
}
public AudioSourceConnectorConfig(Map<String, String> parsedConfig) {
this(conf(), parsedConfig);
}
public static ConfigDef conf() {
return new ConfigDef()
.define(FILENAME_CONFIG, Type.STRING, Importance.HIGH, FILENAME_DOC)
.define(TOPIC_CONFIG, Type.STRING, Importance.HIGH, TOPIC_DOC);
}
public String getFilenameConfig(){
return this.getString("fileName");
}
public String getTopicConfig(){
return this.getString("topic");
}
}
AudioSourceConnector
package org.othman.example;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.Task;
import org.apache.kafka.connect.source.SourceConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AudioSourceConnector extends SourceConnector {
private static Logger log = LoggerFactory.getLogger(AudioSourceConnector.class);
private AudioSourceConnectorConfig config;
private Map<String, String> configProps;
@Override
public String version() {
return VersionUtil.getVersion();
}
@Override
public void start(Map<String, String> map) {
//config = new AudioSourceConnectorConfig(map);
this.configProps = new HashMap(map);
//TODO: Add things you need to do to setup your connector.
}
@Override
public Class<? extends Task> taskClass() {
//TODO: Return your task implementation.
return AudioSourceTask.class;
}
@Override
public List<Map<String, String>> taskConfigs(int maxTasks) {
return (List) IntStream.range(0, maxTasks).mapToObj((i) -> {
return new HashMap(this.configProps);
}).collect(Collectors.toList());
//TODO: Define the individual task configurations that will be executed.
}
@Override
public void stop() {
//TODO: Do things that are necessary to stop your connector.
this.configProps=null;
}
@Override
public ConfigDef config() {
return AudioSourceConnectorConfig.conf();
}
}
AudioSourceTask
package org.othman.example;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.source.SourceTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class AudioSourceTask extends SourceTask {
static final Logger log = LoggerFactory.getLogger(AudioSourceTask.class);
AudioSourceConnectorConfig config;
private Process inputProcess;
byte [] audioFile;
@Override
public String version() {
return VersionUtil.getVersion();
}
@Override
public void start(Map<String, String> map) {
//TODO: Do things here that are required to start your task.
// This could be open a connection to a database, etc.
this.config = new AudioSourceConnectorConfig(map);
try{
audioFile = Files.readAllBytes(Paths.get(this.config.getFilenameConfig()));
// this.inputProcess = (new ProcessBuilder((new String[]{this.config.getFilenameConfig()}))).redirectError().start();
}
catch (IOException e) {
// Use the class logger so the failure shows up in the Connect worker's log.
log.error("Error while trying to read audio file", e);
}
}
@Override
public List<SourceRecord> poll() throws InterruptedException {
//TODO: Create SourceRecord objects that will be sent the kafka cluster.
final ArrayList<SourceRecord> records = new ArrayList<>();
SourceRecord record;
for (int i=0;i < audioFile.length - 1;i++) {
record= new SourceRecord(null, null, this.config.getTopicConfig(), 0, Schema.BYTES_SCHEMA, audioFile[i]);
records.add(record);
}
return records;
}
@Override
public void stop() {
//TODO: Do whatever is required to stop your task.
}
}
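One thing that stands out (an observation, not from the original post): poll() wraps every single byte in its own SourceRecord and re-sends the whole file on every poll cycle. A sketch of a poll() for AudioSourceTask that emits the file once as a single BYTES record, using the field names above, could look like this:
private boolean fileSent = false;

@Override
public List<SourceRecord> poll() throws InterruptedException {
    if (fileSent) {
        Thread.sleep(1000); // nothing left to send; avoid a busy loop
        return null;        // null tells the Connect framework there is no new data
    }
    fileSent = true;
    // Send the entire audio file as one message with a BYTES schema.
    return Collections.singletonList(new SourceRecord(
            null, null, this.config.getTopicConfig(), 0,
            Schema.BYTES_SCHEMA, audioFile));
}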
I am writing data to Accumulo storage natively using the GeoMesa Native Client. Here is my Java code:
package org.locationtech.geomesa.api;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.security.Authorizations;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.feature.AttributeTypeBuilder;
import org.geotools.geometry.jts.JTSFactoryFinder;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore;
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex;
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex$;
import org.locationtech.geomesa.utils.index.IndexMode$;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.filter.FilterFactory2;
import javax.annotation.Nullable;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
public class WorkerBeta {
public static void main(String[] args){
try {
DomainObjectValueSerializer dovs = new DomainObjectValueSerializer();
final GeoMesaIndex<DomainObject> index = AccumuloGeoMesaIndex.buildWithView(
"aj_v14",
"localhost:2181",
"hps",
"root", "9869547580",
false,
dovs,
new SimpleFeatureView<DomainObject>() {
AttributeTypeBuilder atb = new AttributeTypeBuilder();
private List<AttributeDescriptor> attributeDescriptors =
Lists.newArrayList(atb.binding(Integer.class).buildDescriptor("rId")
, atb.binding(String.class).buildDescriptor("dId")
, atb.binding(Integer.class).buildDescriptor("s")
, atb.binding(Integer.class).buildDescriptor("a")
, atb.binding(Integer.class).buildDescriptor("e")
);
@Override
public void populate(SimpleFeature f, DomainObject domainObject, String id, byte[] payload, Geometry geom, Date dtg) {
f.setAttribute("rId", domainObject.rideId);
f.setAttribute("dId", domainObject.deviceId);
f.setAttribute("s", domainObject.speed);
f.setAttribute("a", domainObject.angle);
f.setAttribute("e", domainObject.error);
}
@Override
public List<AttributeDescriptor> getExtraAttributes() {
return attributeDescriptors;
}
}
);
//Inserting
final DomainObject one = new DomainObject(1, "AJJASsP", 12, 40, 1);
final GeometryFactory gf = JTSFactoryFinder.getGeometryFactory();
System.out.println(index.insert(
one,
gf.createPoint(new Coordinate(-74.0, 34.0)),
date("2017-03-31T01:15:00.000Z")
));
//Read
GeoMesaQuery q = GeoMesaQuery.GeoMesaQueryBuilder.builder()
.within(-90.0, -180, 90, 180)
.during(date("2017-01-01T00:00:00.000Z"), date("2017-04-01T00:00:00.000Z"))
.build();
Iterable<DomainObject> results = index.query(q);
int counter = 0;
for(DomainObject dm : results){
counter += 1;
System.out.println("result counter: " + counter);
dovs.toBytes(dm);
}
// Moved inside the try block: index is declared there, and close() also
// flushes pending writes (but the index cannot be reused afterwards).
index.close();
}
catch (Exception ex) {
ex.printStackTrace();
}
}
public static class DomainObject {
public final int rideId;
public final String deviceId;
public final int angle;
public final int speed;
public final int error;
public DomainObject(int rideId, String deviceId, int angle, int speed, int error) {
this.rideId = rideId;
this.deviceId = deviceId;
this.angle = angle;
this.speed = speed;
this.error = error;
}
}
public static class DomainObjectValueSerializer implements ValueSerializer<DomainObject> {
public static final Gson gson = new Gson();
@Override
public byte[] toBytes(DomainObject o) {
return gson.toJson(o).getBytes();
}
@Override
public DomainObject fromBytes(byte[] bytes) {
return gson.fromJson(new String(bytes), DomainObject.class);
}
}
public static Date date(String s) {
return Date.from(ZonedDateTime.parse(s).toInstant());
}
}
The problem with this code is that I need to create the index object for every new insert request and call index.close() for the write to be reflected, but I can't execute insert() again once index.close() has been called. I will be accepting insert requests from a queue at a very high rate, and I don't want to create the index object every time. How can I do that?
In short, how can I flush writes without calling close()?
I created a GeomesaClient class to use GeoMesa natively. Below is a partial implementation showing how you can flush with AccumuloAppendFeatureWriter without calling close().
public class GeomesaClient {
private AccumuloDataStore ds = null;
private AccumuloAppendFeatureWriter fw = null;
private SimpleFeatureSource sfs = null;
private String tableName = "";
private FeatureStore fst = null;
private SimpleFeatureType sft;
public GeomesaClient(Map<String, String> dsConf) throws Exception {
this.ds = (AccumuloDataStore) DataStoreFinder.getDataStore(dsConf);
this.tableName = dsConf.get("tableName");
sft = createFeatureType();
if(!Arrays.asList(this.ds.getTypeNames()).contains(sft.getTypeName())){
ds.createSchema(sft);
}
// Look up the feature source before casting it; in the original order,
// fst was assigned from sfs while sfs was still null.
this.sfs = ds.getFeatureSource(sft.getTypeName());
this.fst = (FeatureStore) sfs;
this.fw = (AccumuloAppendFeatureWriter) (this.ds.getFeatureWriterAppend(sft.getTypeName(),
Transaction.AUTO_COMMIT));
}
/*
Flush with AccumuloAppendFeatureWriter
*/
public void flush(boolean force) {
fw.flush();
}
}
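For completeness, a write method could then be added to GeomesaClient along these lines (a hypothetical sketch, not in the original answer), appending through the long-lived writer and flushing instead of closing:
public void write(Map<String, Object> attributes) throws IOException {
    // Fill the next feature from the writer, append it, then flush so the
    // mutations reach Accumulo while the writer stays open for reuse.
    SimpleFeature next = fw.next();
    attributes.forEach(next::setAttribute);
    fw.write();
    flush(true);
}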
I am currently working on a CDI push event. I plan to trigger the push event from backend Java code instead of a frontend JSF commandButton.
Here is the code for PushCdiBean.java:
import java.io.Serializable;
import java.util.UUID;
import javax.annotation.PostConstruct;
import javax.enterprise.event.Event;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import javax.inject.Inject;
import org.richfaces.application.push.MessageException;
import org.richfaces.application.push.TopicKey;
import org.richfaces.application.push.TopicsContext;
import org.richfaces.cdi.push.Push;
/**
* @author Lukas Fryc
*/
#javax.inject.Named("pushCdiBean")
#javax.enterprise.context.RequestScoped
//#ManagedBean(name="pushCdiBean")
//#ViewScoped
public class PushCdiBean implements Serializable {
/**
*
*/
private static final long serialVersionUID = -5241937306040858158L;
private static final String CDI_PUSH_TOPIC = "pushCdi";
private String userIdentifier;
private String message;
@Inject
@Push(topic=CDI_PUSH_TOPIC) //i thought that the topic is initialized with this ?!
private Event<String> pushEvent;
@PostConstruct
public void initialize() {
if(userIdentifier == null) {
userIdentifier = UUID.randomUUID().toString().replace("-", "");
}
TopicsContext topicsContext = TopicsContext.lookup();
topicsContext.getOrCreateTopic(new TopicKey(CDI_PUSH_TOPIC, userIdentifier)); // initialize the topic and make the troublesome message disappear
}
public synchronized void sendMessage() throws MessageException {
pushEvent.fire(message);
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public Event<String> getPushEvent() {
return pushEvent;
}
public void setPushEvent(Event<String> pushEvent) {
this.pushEvent = pushEvent;
}
public String getUserIdentifier() {
return userIdentifier;
}
public void setUserIdentifier(String userIdentifier) {
this.userIdentifier = userIdentifier;
}
}
When I instantiate the PushCdiBean class and call its sendMessage() method, however, pushEvent.fire() fails.
The code is here:
/**
*
*/
public void run() {
while (running) {
try {
PushCdiBean pushTest = new PushCdiBean();
pushTest.setMessage("This is CDI push Test");
pushTest.sendMessage();
}
catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
}
sleep(4000);
}
}
Could you please give me some suggestions? Thank you very much.
You have to @Inject the CDI bean, not instantiate it yourself via new.
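A sketch of what that could look like (the surrounding bean and its names are hypothetical; note that the request-scoped PushCdiBean can only be used while a request context is active):
@javax.inject.Named("pushTrigger")
@javax.enterprise.context.RequestScoped
public class PushTrigger {

    @Inject
    private PushCdiBean pushCdiBean; // container-managed, so pushEvent is actually injected

    public void firePush() throws MessageException {
        pushCdiBean.setMessage("This is CDI push Test");
        pushCdiBean.sendMessage();
    }
}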