private void handleWebSocketFrame(ChannelHandlerContext ctx, WebSocketFrame frame) {
// Check for closing frame
if (frame instanceof CloseWebSocketFrame) {
handshaker.close(ctx.channel(), (CloseWebSocketFrame) frame.retain());
return;
}
if (frame instanceof PingWebSocketFrame) {
ctx.channel().write(new PongWebSocketFrame(frame.content().retain()));
return;
}
if (!(frame instanceof TextWebSocketFrame)) {
throw new UnsupportedOperationException(String.format("%s frame types not supported", frame.getClass()
.getName()));
}
// Send the uppercase string back.
String request = ((TextWebSocketFrame) frame).text();
if (logger.isLoggable(Level.FINE)) {
logger.fine(String.format("%s received %s", ctx.channel(), request));
}
Message msg = new Message(ctx.channel(), request);
ReadQueueHandler.getInstance().addMessageToProcess(msg);
}
public class ReadQueueHandler implements Runnable {
private static int POOL_SIZE = 3;
private static ReadQueueHandler instance;
private final BlockingQueue<Message> messageQueue;
private final ExecutorService threadPool;
private final int threadPoolSize;
private final boolean isActive;
private ReadQueueHandler() {
this.threadPoolSize = POOL_SIZE;
this.threadPool = Executors.newFixedThreadPool(threadPoolSize);
this.messageQueue = new LinkedBlockingQueue<Message>();
isActive = true;
initThreadPool();
}
private void initThreadPool() {
for (int i = 0; i < this.threadPoolSize; i++) {
this.threadPool.execute(this);
}
}
/**
* Add a message to the read queue.
*
* @param message the message to add
*/
public void addMessageToProcess(Message message) {
if (message != null) {
this.messageQueue.add(message);
}
}
@Override
public void run() {
while (isActive) {
Message message = null;
try {
message = this.messageQueue.take();
} catch (InterruptedException e) {
System.out.println("Exceptio " + e);
/*
* TODO Add logging
*/
}
if (message != null) {
Channel channel = message.getChannel();
channel.write(new TextWebSocketFrame("Message handled "));
}
}
}
public static ReadQueueHandler getInstance() {
if (instance == null) {
instance = new ReadQueueHandler();
}
return instance;
}
}
If I execute Channel.write("something") directly instead of adding the data to the queue, then everything works fine and the client gets the data. But if Channel.write("") is executed from another thread, the data is not received. What can be the reason? Can Channel.write() not be executed from another thread?
To me it seems like you forgot to call flush() after the write is done, to guarantee the data is actually flushed to the socket. For example, you could fix this by using:
channel.writeAndFlush(new TextWebSocketFrame("Message handled "));
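For background (a minimal sketch of my own, not part of the original answer): in Netty, write() only places the frame in the channel's outbound buffer, and flush() is what actually pushes it to the socket; both are safe to call from a thread other than the event loop, since Netty hands the operation over to the channel's event loop internally. So the explicit two-step form is equivalent:
// Equivalent to writeAndFlush(): write() queues the frame, flush() sends it.
channel.write(new TextWebSocketFrame("Message handled "));
channel.flush();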
I have a ServerSocket thread that accepts a connection and starts a socket handler. This part seems to be working fine, with no memory leaks or high CPU usage. I added a publisher thread and an observer thread, and now my Java program is reporting high CPU usage.
Subject.java:
public interface Subject {
public void attach(Observer o);
public void detach(Observer o);
public void notifyUpdate(Message m);
}
MessagePublisher.java:
public class MessagePublisher extends Thread implements Subject{
private List<Observer> observers = new ArrayList<>();
private boolean readyToPublish = false;
private ConcurrentLinkedQueue<Message> msgHolder;
public MessagePublisher(ConcurrentLinkedQueue<Message> _queue){
this.msgHolder = _queue;
}
@Override
public void attach(Observer o) {
observers.add(o);
}
@Override
public void detach(Observer o) {
observers.remove(o);
}
@Override
public void notifyUpdate(Message m) {
for(Observer o: observers) {
o.update(m);
}
}
public void run(){
this.readyToPublish = true;
while (readyToPublish)
{
try
{
Message _m = (Message)this.msgHolder.poll();
if(!_m.equals(null)){
System.out.println("Polled message: " + _m.getMessage());
System.out.println("Number of subscribers: " + observers.size());
notifyUpdate(_m);
}
}
catch(Exception j) { }
try { sleep(9); }
catch(Exception e) { }
}
EndWork();
}
public void EndWork(){
this.readyToPublish = false;
this.observers.clear();
}
}
Main.java:
public static void main(String[] args) {
// TODO code application logic here
ConcurrentLinkedQueue<Message> msgHolder = new ConcurrentLinkedQueue<Message>();
ServerSocketThread _socketThread = new ServerSocketThread(msgHolder);
_socketThread.start();
MessagePublisher _publisher = new MessagePublisher(msgHolder);
_publisher.start();
UIServerSocketThread _uiSocketThread = new UIServerSocketThread(_publisher);
_uiSocketThread.start();
}
UIServerSocketThread.java:
public class UIServerSocketThread extends Thread{
private ServerSocket objServerSocket;
private Socket objSocket;
private int iPort = 21001;
private FileHandler obj_LogFileHandler;
private Logger obj_Logger;
private int file_size = 8000000;
private int numLogFiles = 20;
private UIClientSocketThread objCltSocket;
private MessagePublisher objPublisher;
private boolean running = false;
public UIServerSocketThread(MessagePublisher _publisher){
this.running = true;
try {
this.obj_LogFileHandler = new FileHandler("uiserver.log.%g", file_size, numLogFiles);
this.obj_LogFileHandler.setFormatter(new SimpleFormatter());
}
catch ( IOException obj_Exception ) {
this.obj_LogFileHandler = null;
}
catch ( SecurityException obj_Exception ) {
this.obj_LogFileHandler = null;
}
this.obj_Logger = null;
if ( this.obj_LogFileHandler != null ) {
this.obj_Logger = Logger.getLogger("eti.logger.uiserver");
this.obj_Logger.addHandler(this.obj_LogFileHandler);
}
try {
this.objServerSocket = new ServerSocket(this.iPort);
} catch(IOException i){
//i.printStackTrace();
this.obj_Logger.info(i.getMessage());
}
this.objPublisher = _publisher;
}
public void run() {
StringBuffer sMsg;
sMsg = new StringBuffer();
while ( this.running ) {
try {
this.objSocket = this.objServerSocket.accept();
} catch(Exception e){
sMsg.append("Error accepting ui socket connection\n");
sMsg.append(e.getMessage());
this.obj_Logger.info(sMsg.toString());
}
try {
this.objCltSocket = new UIClientSocketThread(this.objSocket, this.obj_Logger);
if(!this.objPublisher.equals(null)){
this.obj_Logger.info("Attacing clientObserver");
this.objPublisher.attach(this.objCltSocket);
}
this.objCltSocket.start();
} catch(Exception r) {
sMsg.append("Error \n");
sMsg.append(r.getMessage());
this.obj_Logger.info(sMsg.toString());
}
}
this.objPublisher.EndWork();
stopServerSocketThread();
} // end run
public void stopServerSocketThread() {
try {
this.running = false;
this.objServerSocket.close();
this.objServerSocket = null;
this.obj_Logger.info("Server NOT ACCEPTING Connections!!");
} catch(Exception e ) {
this.obj_Logger.info(e.getMessage());
}
}
}
I am not sure where the issue is. Any help is appreciated.
You have two infinite loops in your application: one in MessagePublisher.run() and another in UIServerSocketThread.run(). These loops are what cause the high CPU usage.
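As a sketch of how to avoid the polling loop in MessagePublisher (assuming the shared msgHolder can be changed to a BlockingQueue in both threads), block on take() so the thread sleeps until a message actually arrives:
// Sketch: with a BlockingQueue the publisher parks inside take() instead of
// polling and sleeping. The constructor would accept a BlockingQueue<Message>
// shared with ServerSocketThread.
private BlockingQueue<Message> msgHolder;

@Override
public void run() {
    this.readyToPublish = true;
    try {
        while (readyToPublish) {
            Message m = msgHolder.take();   // blocks until a message is available
            System.out.println("Polled message: " + m.getMessage());
            System.out.println("Number of subscribers: " + observers.size());
            notifyUpdate(m);
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // interrupted: fall through to cleanup
    }
    EndWork();
}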
Let's consider the following code:
Client code:
public class MyClient {
private final MyClientSideService myClientSideService;
public MyClient(MyClientSideService myClientSideService) {
this.myClientSideService = myClientSideService;
}
public String requestRow(Integer req) {
return myClientSideService.requestSingleRow(req);
}
}
Client side service:
public class MyClientSideService {
private final MyServerSideService myServerSideService;
public MyClientSideService(MyServerSideService myServerSideService) {
this.myServerSideService = myServerSideService;
}
public String requestSingleRow(int req) {
return myServerSideService.requestRowBatch(Arrays.asList(req)).get(0);
}
}
Server side service:
@Slf4j
public class MyServerSideService {
//single threaded bottleneck service
public synchronized List<String> requestRowBatch(List<Integer> batchReq) {
log.info("Req for {} started");
try {
Thread.sleep(100);
return batchReq.stream().map(String::valueOf).collect(Collectors.toList());
} catch (InterruptedException e) {
return null;
} finally {
log.info("Req for {} finished");
}
}
}
And main:
@Slf4j
public class MainClass {
public static void main(String[] args) {
MyClient myClient = new MyClient(new MyClientSideService(new MyServerSideService()));
for (int i = 0; i < 20; i++) {
new Thread(() -> {
for (int m = 0; m < 100; m++) {
int k = m;
log.info("Response is {}", myClient.requestRow(k));
}
}).start();
}
}
}
According to the logs it takes approximately 4 min 22 sec, which is too much; I think it can be improved dramatically. I would like to implement implicit batching: MyClientSideService should collect requests, and once it has 50 of them (the preconfigured batch size) or a preconfigured timeout expires, it should call MyServerSideService and route the results back to the clients. The protocol should stay synchronous, so clients must be blocked until their result is available.
I tried to write code using CountDownLatch and CyclicBarrier, but my attempts were far from successful.
How can I achieve my goal?
P.S.
If I replace the requestRowBatch return type List<String> with Map<Integer, String>, delegating the request/response mapping to the server, the following works with limitations: it only works if I send <= 25 requests.
@Slf4j
public class MyClientSideService {
private final Integer batchSize = 25;
private final Integer maxTimeoutMillis = 5000;
private final MyServerSideService myServerSideService;
private final Queue<Integer> queue = new ArrayBlockingQueue(batchSize);
private final Map<Integer, String> responseMap = new ConcurrentHashMap();
private final AtomicBoolean started = new AtomicBoolean();
private CountDownLatch startBatchRequestLatch = new CountDownLatch(batchSize);
private CountDownLatch awaitBatchResponseLatch = new CountDownLatch(1);
public MyClientSideService(MyServerSideService myServerSideService) {
this.myServerSideService = myServerSideService;
}
public String requestSingleRow(int req) {
queue.offer(req);
if (!started.compareAndExchange(false, true)) {
log.info("Start batch collecting");
startBatchCollecting();
}
startBatchRequestLatch.countDown();
try {
log.info("Awaiting batch response latch for {}...", req);
awaitBatchResponseLatch.await();
log.info("Finished awaiting batch response latch for {}...", req);
return responseMap.get(req);
} catch (InterruptedException e) {
e.printStackTrace();
return "EXCEPTION";
}
}
private void startBatchCollecting() {
new Thread(() -> {
try {
log.info("Await startBatchRequestLatch");
startBatchRequestLatch.await(maxTimeoutMillis, TimeUnit.MILLISECONDS);
log.info("await of startBatchRequestLatch finished");
} catch (InterruptedException e) {
e.printStackTrace();
}
responseMap.putAll(requestBatch(queue));
log.info("Releasing batch response latch");
awaitBatchResponseLatch.countDown();
}).start();
}
public Map<Integer, String> requestBatch(Collection<Integer> requestList) {
return myServerSideService.requestRowBatch(requestList);
}
}
Update
Following Malt's answer, I was able to develop the following:
@Slf4j
public class MyClientSideServiceCompletableFuture {
private final Integer batchSize = 25;
private final Integer maxTimeoutMillis = 5000;
private final MyServerSideService myServerSideService;
private final Queue<Pair<Integer, CompletableFuture>> queue = new ArrayBlockingQueue(batchSize);
private final AtomicInteger counter = new AtomicInteger(0);
private final Lock lock = new ReentrantLock();
public MyClientSideServiceCompletableFuture(MyServerSideService myServerSideService) {
this.myServerSideService = myServerSideService;
}
public String requestSingleRow(int req) {
CompletableFuture<String> future = new CompletableFuture<>();
lock.lock();
try {
queue.offer(Pair.of(req, future));
int counter = this.counter.incrementAndGet();
if (counter != 0 && counter % batchSize == 0) {
log.info("request");
List<Integer> requests = queue.stream().map(p -> p.getKey()).collect(Collectors.toList());
Map<Integer, String> serverResponseMap = requestBatch(requests);
queue.forEach(pair -> {
String response = serverResponseMap.get(pair.getKey());
CompletableFuture<String> value = pair.getValue();
value.complete(response);
});
queue.clear();
}
} finally {
lock.unlock();
}
try {
return future.get();
} catch (Exception e) {
return "Exception";
}
}
public Map<Integer, String> requestBatch(Collection<Integer> requestList) {
return myServerSideService.requestRowBatch(requestList);
}
}
But it doesn't work if the number of requests is not a multiple of the batch size.
After replacing the requestRowBatch return type List<String> with Map<Integer, String>, delegating the request/response mapping to the server, I was able to create the following solution:
@Slf4j
public class MyClientSideServiceCompletableFuture {
private final Integer batchSize = 25;
private final Integer timeoutMillis = 5000;
private final MyServerSideService myServerSideService;
private final BlockingQueue<Pair<Integer, CompletableFuture>> queue = new LinkedBlockingQueue<>();
private final Lock lock = new ReentrantLock();
private final Condition requestAddedCondition = lock.newCondition();
public MyClientSideServiceCompletableFuture(MyServerSideService myServerSideService) {
this.myServerSideService = myServerSideService;
startQueueDrainer();
}
public String requestSingleRow(int req) {
CompletableFuture<String> future = new CompletableFuture<>();
while (!queue.offer(Pair.of(req, future))) {
log.error("Can't add {} to the queue. Retrying...", req);
}
lock.lock();
try {
requestAddedCondition.signal();
} finally {
lock.unlock();
}
try {
return future.get();
} catch (Exception e) {
return "Exception";
}
}
private void startQueueDrainer() {
new Thread(() -> {
log.info("request");
while (true) {
ArrayList<Pair<Integer, CompletableFuture>> requests = new ArrayList<>();
if (queue.drainTo(requests, batchSize) > 0) {
log.info("drained {} items", requests.size());
Map<Integer, String> serverResponseMap = requestBatch(requests.stream().map(Pair::getKey).collect(Collectors.toList()));
requests.forEach(pair -> {
String response = serverResponseMap.get(pair.getKey());
CompletableFuture<String> value = pair.getValue();
value.complete(response);
});
} else {
lock.lock();
try {
while (queue.size() == 0) {
try {
log.info("Waiting on condition");
requestAddedCondition.await(timeoutMillis, TimeUnit.MILLISECONDS);
log.info("Waking up on condition");
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} finally {
lock.unlock();
}
}
}
}).start();
}
public Map<Integer, String> requestBatch(Collection<Integer> requestList) {
return myServerSideService.requestRowBatch(requestList);
}
}
It looks like a working solution. But I am not sure if it is optimal.
Your MyClientSideServiceCompletableFuture solution sends the requests to the server every time you add something to the queue and doesn't wait for the requests to reach the batch size. You are using a BlockingQueue but adding an unnecessary blocking condition and locks; BlockingQueue already has blocking/timeout capabilities, so no additional condition is necessary.
You can simplify your solution like this: it sends requests to the server only when the batch is full, or when the timeout has passed and the batch is not empty.
private void startQueueDrainer() {
new Thread(() -> {
log.info("request");
ArrayList<Pair<Integer, CompletableFuture>> batch = new ArrayList<>(batchSize);
while (true) {
try {
batch.clear(); //clear batch
long timeToWait = timeoutMillis;
long startTime = System.currentTimeMillis();
while (timeToWait > 0 && batch.size() < batchSize) {
Pair<Integer, CompletableFuture> request = queue.poll(timeToWait, TimeUnit.MILLISECONDS);
if (request != null) {
batch.add(request);
}
// recompute the remaining wait from the original timeout rather than subtracting cumulatively
timeToWait = timeoutMillis - (System.currentTimeMillis() - startTime);
}
if (!batch.isEmpty()) {
// we wait at least timeoutMillis or batch is full
log.info("send {} requests to server", batch.size());
Map<Integer, String> serverResponseMap = requestBatch(batch.stream().map(Pair::getKey).collect(Collectors.toList()));
batch.forEach(pair -> {
String response = serverResponseMap.get(pair.getKey());
CompletableFuture<String> value = pair.getValue();
value.complete(response);
});
} else {
log.info("We wait {} but the batch is still empty", System.currentTimeMillis() - startTime);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();
}
Change the requestSingleRow method so it does not use the lock:
public String requestSingleRow(int req) {
CompletableFuture<String> future = new CompletableFuture<>();
while (!queue.offer(Pair.of(req, future))) {
log.error("Can't add {} to the queue. Retrying...", req);
}
try {
return future.get();
} catch (Exception e) {
return "Exception";
}
}
You could use CompletableFuture.
Have the threads calling MyClientSideService put their request in a Queue (possibly a BlockingQueue) and get a new CompletableFuture in return. The calling thread can call CompletableFuture.get() to block until a result is ready, or go on doing other things.
That CompletableFuture will be stored together with the request in MyClientSideService. When you reach 50 requests (and therefore 50 CompletableFuture instances), have the client service send the batch request.
When the request is complete, use the CompletableFuture.complete(value) method of each CompletableFuture instance in the queue to notify the client thread that the response is ready. This will unblock the client if it has already called a blocking method like CompletableFuture.get(), or make such a call return instantly with the value if it is made later.
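To make that shape concrete, here is a minimal, self-contained sketch of the idea (class, field, and timeout names are mine, and it assumes requestRowBatch has been changed to return Map<Integer, String> as in the question's update):
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;

public class BatchingClientSketch {
    private static final int BATCH_SIZE = 50;
    private static final long MAX_WAIT_MILLIS = 100;

    private final BlockingQueue<AbstractMap.SimpleEntry<Integer, CompletableFuture<String>>> pending =
            new LinkedBlockingQueue<>();
    private final MyServerSideService server;

    public BatchingClientSketch(MyServerSideService server) {
        this.server = server;
        Thread drainer = new Thread(this::drainLoop);
        drainer.setDaemon(true);
        drainer.start();
    }

    // Caller side: enqueue the request together with its future and block on it.
    public String requestSingleRow(int req) throws InterruptedException, ExecutionException {
        CompletableFuture<String> future = new CompletableFuture<>();
        pending.put(new AbstractMap.SimpleEntry<>(req, future));
        return future.get();
    }

    // Batcher side: wait for the first request, collect up to BATCH_SIZE requests
    // for at most MAX_WAIT_MILLIS, make one server call, then complete every future.
    private void drainLoop() {
        while (true) {
            try {
                List<AbstractMap.SimpleEntry<Integer, CompletableFuture<String>>> batch = new ArrayList<>();
                batch.add(pending.take());
                long deadline = System.currentTimeMillis() + MAX_WAIT_MILLIS;
                while (batch.size() < BATCH_SIZE) {
                    long remaining = deadline - System.currentTimeMillis();
                    if (remaining <= 0) break;
                    AbstractMap.SimpleEntry<Integer, CompletableFuture<String>> next =
                            pending.poll(remaining, TimeUnit.MILLISECONDS);
                    if (next == null) break;
                    batch.add(next);
                }
                Map<Integer, String> rows = server.requestRowBatch(
                        batch.stream().map(AbstractMap.SimpleEntry::getKey).collect(Collectors.toList()));
                batch.forEach(e -> e.getValue().complete(rows.get(e.getKey())));
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
}
Because a partial batch is sent once the short wait expires, this also handles the case where the number of requests is not a multiple of the batch size.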
I'm making a multithreaded FTP server in Java and I have a question about how to go about handling multiple clients. Right now, I have a Server that looks like this:
public class Server {
private static final int HOST_PORT = 6000;
private ServerSocket serverSocket;
public Server(ServerModel serverModel) throws IOException {
serverSocket = new ServerSocket(HOST_PORT);
}
public void start() {
try {
acceptClients();
} catch (IOException e) {
e.printStackTrace();
}
}
private void acceptClients() throws IOException {
while (true) {
Socket client = serverSocket.accept();
ServerModel serverModel = new ServerModel();
Thread worker = new Thread(new ServerWorker(client, serverModel));
worker.start();
}
}
}
And a ServerWorker object which is responsible for interpreting client commands and responding to them:
public class ServerWorker implements Runnable {
private ServerRemoteHandler serverRemoteHandler;
private ServerModel serverModel;
private static final int GET_CODE = 1;
private static final int PUSH_CODE = 2;
private static final int CHANGE_DIRECTORY_CODE = 3;
private static final int PRINT_WORKING_DIRECTORY_CODE = 4;
private static final int FILE_EXISTS_CODE = 5;
private static final int LIST_FILES_DIRECTORIES_CODE = 6;
private static final int EXIT_CODE = 0;
public ServerWorker(Socket client, ServerModel serverModel) throws IOException {
this.serverModel = serverModel;
try {
serverRemoteHandler = new ServerRemoteHandler(client);
} catch (IOException e) {
e.printStackTrace();
}
}
private void parseCommand() {
int command;
try {
while (true) {
command = serverRemoteHandler.getCommand();
switch (command) {
case CHANGE_DIRECTORY_CODE:
changeDirectory();
break;
case PRINT_WORKING_DIRECTORY_CODE:
printWorkingDirectory();
break;
case FILE_EXISTS_CODE:
fileExists();
break;
case LIST_FILES_DIRECTORIES_CODE:
listFilesDirectories();
break;
case GET_CODE:
pushFile();
break;
case PUSH_CODE:
getFile();
break;
case EXIT_CODE:
exit();
break;
}
}
} catch (IOException e) {
exit();
}
}
private void printWorkingDirectory() throws IOException {
serverRemoteHandler.printWorkingDirectory(serverModel.getCurrentPath());
}
private void changeDirectory() throws IOException {
String fileName = serverRemoteHandler.getFileName();
boolean success = serverModel.changeDirectory(fileName);
serverRemoteHandler.changeDirectory(success);
}
private void fileExists() throws IOException {
String fileName = serverRemoteHandler.getFileName();
serverRemoteHandler.fileExists(serverModel.fileExists(fileName));
}
private void pushFile() throws IOException {
File file = serverModel.getFile(serverRemoteHandler.getFileName());
long fileSize = serverModel.getFileSize(file);
serverRemoteHandler.pushFile(file, fileSize);
}
private void listFilesDirectories() throws IOException {
serverRemoteHandler.listFilesDirectories(serverModel.listFilesDirectories());
}
private void getFile() throws IOException {
String fileName = serverRemoteHandler.getFileName();
File file = new File(serverModel.getCurrentPath() + File.separator + fileName);
serverRemoteHandler.getFile(file);
}
private void exit() {
serverRemoteHandler.exit();
}
@Override
public void run() {
while (true) {
parseCommand();
}
}
}
Each ServerWorker has an object, a ServerRemoteHandler, whose job is to send and receive information over the streams. My questions are about this line:
Thread worker = new Thread(new ServerWorker(client, serverModel));
Is this thread safe? Is this a good way of implementing it? Thank you.
Yes, it is thread-safe. Everything in that line is method-local, so it is only accessible to the current thread; the worker thread then starts while this one loops around and gets a new set of values for those variables.
(That doesn't imply the entire program is thread-safe, of course.)
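To illustrate the point about method-local confinement (my own sketch, not part of the answer): the pattern only stops being trivially thread-safe once the workers share a mutable object, for example a single ServerModel hoisted out of the accept loop:
// NOT safe without extra synchronization: one ServerModel (current path and
// other state) would now be mutated concurrently by every worker thread.
ServerModel shared = new ServerModel();
while (true) {
    Socket client = serverSocket.accept();
    new Thread(new ServerWorker(client, shared)).start();
}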
This is the SocketServer code that generates a server thread
public class ProcessorCorresponder {
protected final static Logger logger = LogManager.getLogger( ProcessorCorresponder.class );
private static int port = Integer.parseInt(PropertiesLoader.getProperty("appserver.port") == null ? "666" : PropertiesLoader.getProperty("appserver.port"));
private static int maxConnections = Integer.parseInt(PropertiesLoader.getProperty("appserver.maxconnections") == null ? "666" : PropertiesLoader.getProperty("appserver.maxconnections"));
public static void main(String[] args) {
logger.info("Starting server .. "
+ "[port->" + port
+ ",databaseName->" + databaseName + "]");
try (ServerSocket listener = new ServerSocket();) {
listener.setReuseAddress(true);
listener.bind(new InetSocketAddress(port));
Socket server;
int i = 0;
while((i++ < maxConnections) || (maxConnections == 0)) {
server = listener.accept();
logger.debug(
"New Thread listening on " + server.getLocalAddress().toString() + ":" + server.getLocalPort()
+ ", initiated from IP => " + server.getInetAddress().toString() + ":" + server.getPort()
);
MySocketServer socSrv = new MySocketServer (server);
Thread t = new Thread( socSrv );
t.start();
}
} catch (Exception ex) {
logger.error("Error in ProcessorInterface", ex);
}
}
}
Server code: this is a thread that handles one connection; a separate program monitors the ServerSocket and spins off request threads as needed.
public class MySocketServer implements Runnable {
protected final static Logger logger = LogManager.getLogger(MySocketServer.class);
private final Socket server;
// because we are using threads, we must make this volatile, or the class will
// never exit.
private volatile boolean shouldContinue = true;
private StringBuffer buffHeartbeatMessage = new StringBuffer().append((char) 0).append((char) 0).append((char) 0)
.append((char) 0).append((char) 0).append((char) 0);
private Heartbeat heartbeat = new Heartbeat(/* 60 */3000, buffHeartbeatMessage.toString());
public MySocketServer(Socket server) {
this.server = server;
}
@Override
public void run() {
try (BufferedReader in = new BufferedReader(new InputStreamReader(this.server.getInputStream()));
BufferedOutputStream out = new HeartbeatBufferedOutputStream(this.server.getOutputStream(),
heartbeat)) {
final StreamListener listener = new StreamListener(in);
listener.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
if (event.getID() == ActionEvent.ACTION_PERFORMED) {
if (event.getActionCommand().equals(StreamListener.ERROR)) {
logger.error("Problem listening to stream.");
listener.setShouldContinue(false);
stopRunning();
} else {
String messageIn = event.getActionCommand();
if (messageIn == null) { // End of Stream;
stopRunning();
} else { // hey, we can do what we were meant for
logger.debug("Request received from client");
// doing stuff here
...
// done doing stuff
logger.debug("Sending Client Response");
try {
sendResponse(opResponse, out);
} catch (Exception ex) {
logger.error("Error sending response to OP.", ex);
}
}
}
}
}
});
listener.start();
while (shouldContinue) {
// loop here until shouldContinue = false;
// this should be set to false in the ActionListener above
}
heartbeat.setShouldStop(true);
return;
} catch (Exception ex) {
logger.error("Error in ESPSocketServer", ex);
return;
}
}
private void stopRunning() {
shouldContinue = false;
}
private void sendResponse(ClientResponse opResponse, BufferedOutputStream out) throws Exception {
logger.debug("Before write");
out.write(opResponse.getResponse().getBytes());
logger.debug("After write. Before flush");
out.flush();
logger.debug("After flush");
// this log message is in my logs, so I know the message was sent
}
}
My StreamListener class.
public class StreamListener extends Thread {
protected final static Logger logger = LogManager.getLogger(StreamListener.class);
public final static String ERROR = "ERROR";
private BufferedReader reader = null;
private List<ActionListener> actionListeners = new ArrayList<>();
private boolean shouldContinue = true;
public StreamListener(BufferedReader reader) {
this.reader = reader;
}
@Override
public void run() {
while (shouldContinue) {
String message;
try {
// client blocks here and never receives message
message = reader.readLine();
ActionEvent event = new ActionEvent(this, ActionEvent.ACTION_PERFORMED, message);
fireActionPerformed(event);
} catch (IOException e) {
e.printStackTrace();
ActionEvent event = new ActionEvent(this, ActionEvent.ACTION_PERFORMED, ERROR);
fireActionPerformed(event);
}
}
}
public void setShouldContinue(boolean shouldContinue) {
this.shouldContinue = shouldContinue;
}
public boolean getShouldContinue() {
return shouldContinue;
}
public boolean addActionListener(ActionListener listener) {
return actionListeners.add(listener);
}
public boolean removeActionListener(ActionListener listener) {
return actionListeners.remove(listener);
}
private void fireActionPerformed(ActionEvent event) {
for (ActionListener listener : actionListeners) {
listener.actionPerformed(event);
}
}
}
My Heartbeat class
public class Heartbeat extends Thread {
private BufferedOutputStream bos = null;
private int beatDelayMS = 0;
private String message = null;
private boolean shouldStop = false;
public Heartbeat(int beatDelayMS, String message) {
this.beatDelayMS = beatDelayMS;
this.message = message;
setDaemon(true);
}
@Override
public void run() {
if (bos == null) { return; }
while(!shouldStop) {
try {
sleep(beatDelayMS);
try {
bos.write(message.getBytes());
bos.flush();
} catch (IOException ex) {
// fall thru
}
} catch (InterruptedException ex) {
if (shouldStop) {
return;
}
}
}
}
public void setBufferedOutputStream(BufferedOutputStream bos) {
this.bos = bos;
}
public BufferedOutputStream getBufferedOutputStream() {
return bos;
}
public void setShouldStop(boolean shouldStop) {
this.shouldStop = shouldStop;
}
public boolean getShouldStop() {
return shouldStop;
}
}
My HeartbeatBufferedOutputStream
public class HeartbeatBufferedOutputStream extends BufferedOutputStream {
private Heartbeat heartbeat = null;
public HeartbeatBufferedOutputStream(OutputStream out, Heartbeat heartbeat) {
super(out);
this.heartbeat = heartbeat;
this.heartbeat.setBufferedOutputStream(this);
heartbeat.start();
}
@Override
public synchronized void flush() throws IOException {
super.flush();
heartbeat.interrupt();
}
}
And finally here is the "Client" class
public class Mockup extends Thread {
protected final static Logger logger = LogManager.getLogger(Mockup.class);
// because we are using threads, we must make this volatile, or the class will
// never exit.
private volatile boolean shouldContinue = true;
public static void main(String[] args) {
new Mockup().start();
}
@Override
public void run() {
try (Socket socket = new Socket("localhost", 16100);
BufferedOutputStream out = new BufferedOutputStream(socket.getOutputStream());
BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()));) {
final StreamListener listener = new StreamListener(in);
listener.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
if (event.getID() == ActionEvent.ACTION_PERFORMED) {
if (event.getActionCommand().equals(StreamListener.ERROR)) {
logger.error("Problem listening to stream.");
listener.setShouldContinue(false);
stopRunning();
} else {
String messageIn = event.getActionCommand();
if (messageIn == null) { // End of Stream;
stopRunning();
} else { // hey, we can do what we were meant for
// convert the messageIn to an OrderPower request, this parses the information
logger.info("Received message from server. [" + messageIn + "].");
}
}
}
}
});
listener.start();
StringBuffer buff = new StringBuffer("Some message to send to server");
logger.info("Sending message to server [" + buff.toString() + "]");
out.write(buff.toString().getBytes());
out.flush();
boolean started = false;
while (shouldContinue) {
if (!started) {
logger.debug("In loop");
started = true;
}
// loop here until shouldContinue = false;
// this should be set to false in the ActionListener above
}
logger.info("Exiting Mockup");
return;
} catch (Exception ex) {
logger.error("Error running MockupRunner", ex);
}
}
private void stopRunning() {
shouldContinue = false;
}
}
I have confirmed from the log messages that the server writes the message to the BufferedOutputStream and flushes it, but the client logs indicate that it is blocked on reader.readLine() and never receives the message.
You are reading lines but you are never writing lines. Add a line terminator to what you send.
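For example (a sketch against the code above), append a newline on both sides that write, so the opposite readLine() can return:
// Client side (Mockup): terminate the outgoing message with a newline.
out.write((buff.toString() + "\n").getBytes());
out.flush();

// Server side (sendResponse): same fix for the response the client reads.
out.write((opResponse.getResponse() + "\n").getBytes());
out.flush();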
Has anyone tried the Servlet 3.1 non-blocking I/O technique on Tomcat?
The request from the browser seems to wait forever. When I run the server in debug mode the call returns, but I still don't see "Data read" and "Data written" in the logs.
Servlet:
@WebServlet(urlPatterns = "/asyncn", asyncSupported = true)
public class AsyncN extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
protected void service(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
println("Before starting job");
final AsyncContext actx = request.startAsync();
actx.setTimeout(Long.MAX_VALUE);
actx.start(new HeavyTask(actx));
println("After starting job");
}
class HeavyTask implements Runnable {
AsyncContext actx;
HeavyTask(AsyncContext actx) {
this.actx = actx;
}
@Override
public void run() {
try {
Thread.currentThread().setName("Job-Thread-" + actx.getRequest().getParameter("job"));
// set up ReadListener to read data for processing
ServletInputStream input = actx.getRequest().getInputStream();
ReadListener readListener = new ReadListenerImpl(input, actx);
input.setReadListener(readListener);
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public static void println(String output) {
System.out.println("[" + Thread.currentThread().getName() + "]" + output);
}
}
Listeners:
public class ReadListenerImpl implements ReadListener {
private ServletInputStream input = null;
private AsyncContext actx = null;
// store the processed data to be sent back to client later
private Queue<String> queue = new LinkedBlockingQueue<>();
ReadListenerImpl(ServletInputStream input, AsyncContext actx) {
this.input = input;
this.actx = actx;
}
@Override
public void onDataAvailable() throws IOException {
println("Data is now available, starting to read");
StringBuilder sb = new StringBuilder();
int len = -1;
byte b[] = new byte[8];
// We need to check input#isReady before reading data.
// The ReadListener will be invoked again when
// the input#isReady is changed from false to true
while (input.isReady() && (len = input.read(b)) != -1) {
String data = new String(b, 0, len);
sb.append(data);
}
println("Data read: "+sb.toString());
queue.add(sb.toString());
}
@Override
public void onAllDataRead() throws IOException {
println("All Data read, now invoking write listener");
// now all data are read, set up a WriteListener to write
ServletOutputStream output = actx.getResponse().getOutputStream();
WriteListener writeListener = new WriteListenerImpl(output, queue, actx);
output.setWriteListener(writeListener);
}
@Override
public void onError(Throwable throwable) {
println("onError");
actx.complete();
throwable.printStackTrace();
}
public static void println(String output) {
System.out.println("[" + Thread.currentThread().getName() + "]" + output);
}
}
public class WriteListenerImpl implements WriteListener {
private ServletOutputStream output = null;
private Queue<String> queue = null;
private AsyncContext actx = null;
WriteListenerImpl(ServletOutputStream output, Queue<String> queue, AsyncContext actx) {
this.output = output;
this.queue = queue;
this.actx = actx;
}
@Override
public void onWritePossible() throws IOException {
println("Ready to write, writing data");
// write while there is data and is ready to write
while (queue.peek() != null && output.isReady()) {
String data = queue.poll();
//do some processing here with the data
try {
data = data.toUpperCase();
Thread.sleep(3000);
} catch (InterruptedException e) {
e.printStackTrace();
}
println("Data written: "+data);
output.print(data);
}
// complete the async process when there is no more data to write
if (queue.peek() == null) {
actx.complete();
}
}
@Override
public void onError(Throwable throwable) {
println("onError");
actx.complete();
throwable.printStackTrace();
}
public static void println(String output) {
System.out.println("[" + Thread.currentThread().getName() + "]" + output);
}
}
Sysout logs:
[http-nio-8080-exec-4]Before starting job
[http-nio-8080-exec-4]After starting job
Sysout logs (when I run the server in debug mode):
[http-nio-8080-exec-6]Before starting job
[http-nio-8080-exec-6]After starting job
[http-nio-8080-exec-6]All Data read, now invoking write listener
[http-nio-8080-exec-6]Ready to write, writing data
Creating the new thread is unnecessary; set the ReadListener from the service method and everything will work asynchronously.
A couple of comments on your code. In the ReadListener you have:
while (input.isReady() && (len = input.read(b)) != -1)
I would suggest using this instead, to stay fully with the asynchronous API:
while (input.isReady() && !input.isFinished())
Also, for your WriteListener you have:
while (queue.peek() != null && output.isReady())
you should reverse the conditionals to:
while (output.isReady() && queue.peek() != null)
This protects against calling actx.complete() too early if the very last write goes asynchronous.
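Put together, the service method could then look roughly like this (a sketch of the suggestion above, not a tested drop-in):
@Override
protected void service(final HttpServletRequest request, final HttpServletResponse response)
        throws ServletException, IOException {
    println("Before starting job");
    final AsyncContext actx = request.startAsync();
    actx.setTimeout(Long.MAX_VALUE);
    // No actx.start(...) and no extra thread: register the ReadListener here and
    // let the container drive it asynchronously.
    ServletInputStream input = request.getInputStream();
    input.setReadListener(new ReadListenerImpl(input, actx));
    println("After starting job");
}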