Java 8
My snippet:
private ThreadedApplicationEventPublisher threadedApplicationEventPublisher;
private final ExecutorService executorService = Executors.newSingleThreadExecutor();
private final List<Long> failedAttemptsList = new LinkedList<>();
private final Semaphore alarmRaised = new Semaphore(1);
Future<Boolean> future = executorService.submit(new ClearAlarmAndResetAttemptsCallable());
if (future.get().booleanValue()) {
// some code here
}
//...
public class ClearAlarmAndResetAttemptsCallable implements Callable<Boolean> {
@Override
public Boolean call() throws Exception {
LOGGER.info("ClearAlarmAndResetAttemptsCallable: start");
while (true) {
long currentTimeSec = System.currentTimeMillis() / 1000;
LOGGER.info(
"ClearAlarmAndResetAttemptsCallable: in_while, currentTimeSec = {}, failedAttemptsList_size = {}",
currentTimeSec, failedAttemptsList.size());
synchronized (failedAttemptsList) {
LOGGER.info("ClearAlarmAndResetAttemptsCallable: inside_synchronized");
Long lastTimeSec = ((LinkedList<Long>) failedAttemptsList).getLast();
long durationSec = currentTimeSec - lastTimeSec;
if (durationSec > timeWindowSec) {
threadedApplicationEventPublisher.publishEvent(new PossibleMCAttackEventPostEvent(this,
folderService.findNetwork(), EventSeverity.NORMAL, DateUtils.getGmtTime(),
AttackTypeEnum.REPETITIVE_FAILED_LOGON_ATTEMPTS, ""));
failedAttemptsList.clear();
break;
}
}
try {
Thread.sleep(timeWindowSec * 100L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
}
alarmRaised.release();
return true;
}
}
Here is the resulting log:
ClearAlarmAndResetAttemptsCallable: start
ClearAlarmAndResetAttemptsCallable: in_while, currentTimeSec = 1648120833, failedAttemptsList_size = 3
The question is: why is the log line
ClearAlarmAndResetAttemptsCallable: inside_synchronized
never printed?
Below is my implementation, where I need to schedule tasks at random intervals and update the time of each task:
class Task implements Callable<String>
{
private final String name;
public Task(String name) {
this.name = name;
}
@Override
public String call() throws Exception {
return "Task [" + name + "] executed on : " + LocalDateTime.now().toString();
}
}
public class Main
{
public static void main(String[] args) throws InterruptedException
{
ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
List<ScheduledFuture<String>> results = new ArrayList<ScheduledFuture<String>>();
for (int i = 1; i <= 5; i++)
{
Task task = new Task("Task-" + i);
ScheduledFuture<String> result = executor.schedule(task, i*2, TimeUnit.SECONDS);
results.add(result);
}
executor.shutdown();
try {
executor.awaitTermination(1, TimeUnit.DAYS);
for(ScheduledFuture<String> result : results) {
System.out.println(result.get());
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
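Since the question mentions random intervals, here is a minimal sketch of how the fixed i*2 delay could be replaced with a random one; the Task class is the one above, ThreadLocalRandom comes from java.util.concurrent, and the 1-10 second bounds are arbitrary assumptions:
ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
List<ScheduledFuture<String>> results = new ArrayList<>();
for (int i = 1; i <= 5; i++) {
long randomDelaySec = ThreadLocalRandom.current().nextLong(1, 10); // random delay between 1 and 9 seconds
results.add(executor.schedule(new Task("Task-" + i), randomDelaySec, TimeUnit.SECONDS));
}
executor.shutdown();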
Let's consider the following code:
Client code:
public class MyClient {
private final MyClientSideService myClientSideService;
public MyClient(MyClientSideService myClientSideService) {
this.myClientSideService = myClientSideService;
}
public String requestRow(Integer req) {
return myClientSideService.requestSingleRow(req);
}
}
Client side service:
public class MyClientSideService {
private final MyServerSideService myServerSideService;
public MyClientSideService(MyServerSideService myServerSideService) {
this.myServerSideService = myServerSideService;
}
public String requestSingleRow(int req) {
return myServerSideService.requestRowBatch(Arrays.asList(req)).get(0);
}
}
Server side service:
@Slf4j
public class MyServerSideService {
//single threaded bottleneck service
public synchronized List<String> requestRowBatch(List<Integer> batchReq) {
log.info("Req for {} started");
try {
Thread.sleep(100);
return batchReq.stream().map(String::valueOf).collect(Collectors.toList());
} catch (InterruptedException e) {
return null;
} finally {
log.info("Req for {} finished");
}
}
}
And main:
@Slf4j
public class MainClass {
public static void main(String[] args) {
MyClient myClient = new MyClient(new MyClientSideService(new MyServerSideService()));
for (int i = 0; i < 20; i++) {
new Thread(() -> {
for (int m = 0; m < 100; m++) {
int k = m;
log.info("Response is {}", myClient.requestRow(k));
}
}).start();
}
}
}
According to the logs it takes approximately 4 min 22 sec, which is too much. I think it can be improved dramatically. I would like to implement implicit batching: MyClientSideService should collect requests, and when it reaches 50 of them (the preconfigured batch size) or a preconfigured timeout expires, it should call MyServerSideService and route the results back to the clients. The protocol should be synchronous, so clients must block until their result arrives.
I tried to write code using CountDownLatch and CyclicBarrier, but my attempts were far from successful.
How can I achieve my goal?
P.S.
If I change the requestRowBatch return type from List<String> to Map<Integer, String>, delegating request/response mapping to the server, the following works with limitations: it only works if I send <= 25 requests.
@Slf4j
public class MyClientSideService {
private final Integer batchSize = 25;
private final Integer maxTimeoutMillis = 5000;
private final MyServerSideService myServerSideService;
private final Queue<Integer> queue = new ArrayBlockingQueue<>(batchSize);
private final Map<Integer, String> responseMap = new ConcurrentHashMap<>();
private final AtomicBoolean started = new AtomicBoolean();
private CountDownLatch startBatchRequestLatch = new CountDownLatch(batchSize);
private CountDownLatch awaitBatchResponseLatch = new CountDownLatch(1);
public MyClientSideService(MyServerSideService myServerSideService) {
this.myServerSideService = myServerSideService;
}
public String requestSingleRow(int req) {
queue.offer(req);
if (!started.compareAndExchange(false, true)) {
log.info("Start batch collecting");
startBatchCollecting();
}
startBatchRequestLatch.countDown();
try {
log.info("Awaiting batch response latch for {}...", req);
awaitBatchResponseLatch.await();
log.info("Finished awaiting batch response latch for {}...", req);
return responseMap.get(req);
} catch (InterruptedException e) {
e.printStackTrace();
return "EXCEPTION";
}
}
private void startBatchCollecting() {
new Thread(() -> {
try {
log.info("Await startBatchRequestLatch");
startBatchRequestLatch.await(maxTimeoutMillis, TimeUnit.MILLISECONDS);
log.info("await of startBatchRequestLatch finished");
} catch (InterruptedException e) {
e.printStackTrace();
}
responseMap.putAll(requestBatch(queue));
log.info("Releasing batch response latch");
awaitBatchResponseLatch.countDown();
}).start();
}
public Map<Integer, String> requestBatch(Collection<Integer> requestList) {
return myServerSideService.requestRowBatch(requestList);
}
}
Update
According to Malt's answer, I was able to develop the following:
@Slf4j
public class MyClientSideServiceCompletableFuture {
private final Integer batchSize = 25;
private final Integer maxTimeoutMillis = 5000;
private final MyServerSideService myServerSideService;
private final Queue<Pair<Integer, CompletableFuture>> queue = new ArrayBlockingQueue<>(batchSize);
private final AtomicInteger counter = new AtomicInteger(0);
private final Lock lock = new ReentrantLock();
public MyClientSideServiceCompletableFuture(MyServerSideService myServerSideService) {
this.myServerSideService = myServerSideService;
}
public String requestSingleRow(int req) {
CompletableFuture<String> future = new CompletableFuture<>();
lock.lock();
try {
queue.offer(Pair.of(req, future));
int counter = this.counter.incrementAndGet();
if (counter != 0 && counter % batchSize == 0) {
log.info("request");
List<Integer> requests = queue.stream().map(p -> p.getKey()).collect(Collectors.toList());
Map<Integer, String> serverResponseMap = requestBatch(requests);
queue.forEach(pair -> {
String response = serverResponseMap.get(pair.getKey());
CompletableFuture<String> value = pair.getValue();
value.complete(response);
});
queue.clear();
}
} finally {
lock.unlock();
}
try {
return future.get();
} catch (Exception e) {
return "Exception";
}
}
public Map<Integer, String> requestBatch(Collection<Integer> requestList) {
return myServerSideService.requestRowBatch(requestList);
}
}
But it doesn't work if the number of requests is not a multiple of the batch size: the batch is only flushed when counter % batchSize == 0, so any leftover requests never complete and their future.get() blocks forever.
If I change the requestRowBatch return type from List<String> to Map<Integer, String>, to delegate request/response mapping to the server, I was able to create the following solution:
@Slf4j
public class MyClientSideServiceCompletableFuture {
private final Integer batchSize = 25;
private final Integer timeoutMillis = 5000;
private final MyServerSideService myServerSideService;
private final BlockingQueue<Pair<Integer, CompletableFuture>> queue = new LinkedBlockingQueue<>();
private final Lock lock = new ReentrantLock();
private final Condition requestAddedCondition = lock.newCondition();
public MyClientSideServiceCompletableFuture(MyServerSideService myServerSideService) {
this.myServerSideService = myServerSideService;
startQueueDrainer();
}
public String requestSingleRow(int req) {
CompletableFuture<String> future = new CompletableFuture<>();
while (!queue.offer(Pair.of(req, future))) {
log.error("Can't add {} to the queue. Retrying...", req);
}
lock.lock();
try {
requestAddedCondition.signal();
} finally {
lock.unlock();
}
try {
return future.get();
} catch (Exception e) {
return "Exception";
}
}
private void startQueueDrainer() {
new Thread(() -> {
log.info("request");
while (true) {
ArrayList<Pair<Integer, CompletableFuture>> requests = new ArrayList<>();
if (queue.drainTo(requests, batchSize) > 0) {
log.info("drained {} items", requests.size());
Map<Integer, String> serverResponseMap = requestBatch(requests.stream().map(Pair::getKey).collect(Collectors.toList()));
requests.forEach(pair -> {
String response = serverResponseMap.get(pair.getKey());
CompletableFuture<String> value = pair.getValue();
value.complete(response);
});
} else {
lock.lock();
try {
while (queue.size() == 0) {
try {
log.info("Waiting on condition");
requestAddedCondition.await(timeoutMillis, TimeUnit.MILLISECONDS);
log.info("Waking up on condition");
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} finally {
lock.unlock();
}
}
}
}).start();
}
public Map<Integer, String> requestBatch(Collection<Integer> requestList) {
return myServerSideService.requestRowBatch(requestList);
}
}
It looks like a working solution. But I am not sure if it is optimal.
Your MyClientSideServiceCompletableFuture solution will send the requests to the server every time you add something to the queue, without waiting for the batch size to be reached. You are using a BlockingQueue but adding an unnecessary blocking condition and locks; BlockingQueue already has blocking-timeout capabilities, so no additional condition is necessary.
You can simplify your solution like this:
It sends requests to the server only when the batch is full, or when the timeout has passed and the batch is not empty.
private void startQueueDrainer() {
new Thread(() -> {
log.info("request");
ArrayList<Pair<Integer, CompletableFuture>> batch = new ArrayList<>(batchSize);
while (true) {
try {
batch.clear(); //clear batch
long timeToWait = timeoutMillis;
long startTime = System.currentTimeMillis();
while (timeToWait > 0 && batch.size() < batchSize) {
Pair<Integer, CompletableFuture> request = queue.poll(timeToWait, TimeUnit.MILLISECONDS);
if (request != null) {
batch.add(request);
}
// recompute the remaining time in this batch window
timeToWait = timeoutMillis - (System.currentTimeMillis() - startTime);
}
if (!batch.isEmpty()) {
// we wait at least timeoutMillis or batch is full
log.info("send {} requests to server", batch.size());
Map<Integer, String> serverResponseMap = requestBatch(batch.stream().map(Pair::getKey).collect(Collectors.toList()));
batch.forEach(pair -> {
String response = serverResponseMap.get(pair.getKey());
CompletableFuture<String> value = pair.getValue();
value.complete(response);
});
} else {
log.info("We wait {} but the batch is still empty", System.currentTimeMillis() - startTime);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();
}
Change the requestSingleRow method so that it does not use the lock:
public String requestSingleRow(int req) {
CompletableFuture<String> future = new CompletableFuture<>();
while (!queue.offer(Pair.of(req, future))) {
log.error("Can't add {} to the queue. Retrying...", req);
}
try {
return future.get();
} catch (Exception e) {
return "Exception";
}
}
You could use CompletableFuture.
Have threads calling MyClientSideService put their request in a Queue (possibly a BlockingQueue) and get a new CompletableFuture in return. The calling thread can call CompletableFuture.get() to block until a result is ready, or go on doing other things.
That CompletableFuture will be stored together with the request in MyClientSideService. When you reach 50 requests (and therefore 50 CompletableFuture instances), have the client service send the batch request.
When the request is complete, use the CompletableFuture.complete(value) method of each CompletableFuture instance in the queue to notify the client thread that the response is ready. This will unblock the client if it has called a blocking method like CompletableFuture.get(), or make it return instantly with the value if called later.
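For illustration, a minimal sketch of the complete()/get() handshake described above; the value and names are made up:
CompletableFuture<String> future = new CompletableFuture<>();
new Thread(() -> {
// ... the batching thread would perform the server call here ...
future.complete("row-42"); // unblocks any thread waiting in get()
}).start();
String value = future.get(); // blocks until complete(...) is called (get() throws checked exceptions, omitted here)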
The task: execute functions in multiple threads using AsynchronousSocketChannel. To simulate long-running work on the server side I'm using Thread.sleep() (in the Worker class). If I sleep the threads for more than 2 seconds, then when reading the data on the client side via ByteBuffer and Future, a java.lang.IndexOutOfBoundsException is thrown (in my printFuncResult function). Please tell me, what is the problem?
Server:
public class Server {
public static final InetSocketAddress hostAddress = new InetSocketAddress("localhost", 5678);
private AsynchronousServerSocketChannel serverChannel;
private AsynchronousSocketChannel clientChannel;
ExecutorService threadPool;
public Server() throws IOException, ExecutionException, InterruptedException {
serverChannel = AsynchronousServerSocketChannel.open();
serverChannel.bind(hostAddress);
System.out.println("Server channel bound to port: " + hostAddress.getPort());
System.out.println("Waiting for client to connect... ");
getClientChannel();
handleArguments();
}
private void getClientChannel() throws ExecutionException, InterruptedException {
Future<AsynchronousSocketChannel> acceptResult = serverChannel.accept();
clientChannel = acceptResult.get();
}
private void handleArguments() throws IOException, InterruptedException {
if ((clientChannel != null) && (clientChannel.isOpen())) {
ByteBuffer buffer = ByteBuffer.allocate(32);
Future<Integer> result = clientChannel.read(buffer);
while (! result.isDone()) {
// System.out.println("Result coming... ");
}
buffer.flip();
int x = buffer.getInt(0);
Worker workerOne = new Worker(Worker.TYPE_F, x, clientChannel);
Worker workerTwo = new Worker(Worker.TYPE_G, x, clientChannel);
threadPool = Executors.newFixedThreadPool(2);
threadPool.execute(workerOne);
threadPool.execute(workerTwo);
Thread.sleep(3000);
clientChannel.close();
}
}
}
class Worker implements Runnable {
public static final int TYPE_F = 1;
public static final int TYPE_G = 2;
private int x;
private int type;
private AsynchronousSocketChannel clientChannel;
public Worker(int type, int x, AsynchronousSocketChannel clientChannel) {
this.x = x;
this.type = type;
this.clientChannel = clientChannel;
}
private void sendResultToClient(int res) {
ByteBuffer buffer = ByteBuffer.allocate(32);
if (type == TYPE_F) {
res = 4545;
} else {
res = 34234;
}
buffer.putInt(0, type);
buffer.putInt(4, res);
try {
Thread.sleep(3000);
} catch (InterruptedException e) {
e.printStackTrace();
}
for (int i = 0; i < 10000; ++i) {
for (int j = 0; j < 10000; ++j) {
int k = i*j + i/(j +1) + i + j + Math.max(i, j);
}
}
boolean written = false;
while (!written) {
try {
clientChannel.write(buffer);
written = true;
} catch (Exception e) {}
}
}
@Override
public void run() {
int result = -1;
Random random = new Random();
switch (type) {
case TYPE_F :
result = (int)Math.pow(x, x);
break;
case TYPE_G :
result = (int)Math.pow(x, x / 2);
break;
}
sendResultToClient(result);
}
}
Client:
public class Client {
private AsynchronousSocketChannel clientChannel;
ExecutorService threadPool;
public Client(int x) throws IOException, InterruptedException, ExecutionException {
threadPool = Executors.newFixedThreadPool(2);
boolean connected = false;
while (!connected) {
try {
clientChannel = AsynchronousSocketChannel.open();
Future<Void> future = clientChannel.connect(Server.hostAddress);
future.get();
connected = true;
} catch (ExecutionException e) {}
}
System.out.println("Client is started: " + clientChannel.isOpen());
System.out.println("Sending messages to server: ");
sendArguments(x);
//showCancelDialog();
listenResult();
clientChannel.close();
}
private void sendArguments(int x) throws InterruptedException, IOException {
ByteBuffer buffer = ByteBuffer.allocate(32);
buffer.putInt(0, x);
Future<Integer> result = clientChannel.write(buffer);
while (! result.isDone()) {
System.out.println("... ");
}
}
private boolean waitForResult(Future <Pair <Integer, Integer>> futureResult) {
Scanner sc = new Scanner(System.in);
while (!futureResult.isDone() ) {
System.out.println("DOING.. break? (y/n)");
String input = sc.next();
if (input.equals("y")) {
System.out.println("CANCELLED");
threadPool.shutdownNow();
return false;
}
}
return true;
}
private void printFuncResult(Future <Pair <Integer, Integer>> futureResult) throws ExecutionException, InterruptedException {
Integer funcType = new Integer(futureResult.get().getKey());
Integer result = new Integer(futureResult.get().getValue());
System.out.println("RESULT OF "+ funcType +" FUNC = " + result);
}
private void listenResult() throws ExecutionException, InterruptedException {
System.out.println("Wating for result...");
Listener listener = new Listener(clientChannel);
Future <Pair <Integer, Integer>> futureResult = threadPool.submit(listener);
if (!waitForResult(futureResult)) {
return;
}
printFuncResult(futureResult);
futureResult = threadPool.submit(listener);
if (!waitForResult(futureResult)) {
return;
}
printFuncResult(futureResult);
}
}
class Listener implements Callable <Pair <Integer, Integer>> {
private AsynchronousSocketChannel clientChannel;
public Listener(AsynchronousSocketChannel channel) {
this.clientChannel = channel;
}
@Override
public Pair <Integer, Integer> call() throws Exception {
ByteBuffer buffer = ByteBuffer.allocate(32);
Future<Integer> futureResult = clientChannel.read(buffer);
while (! futureResult.isDone()) {}
buffer.flip();
Integer type = new Integer(buffer.getInt(0));
Integer result = new Integer (buffer.getInt(4));
return new Pair<Integer, Integer>(type, result);
}
}
The IndexOutOfBoundsException is not coming from printFuncResult. It is only stored in the future and printed with the stack trace there. The IndexOutOfBoundsException is generated in the Listener call() function, on this line:
Integer type = new Integer(buffer.getInt(0));
This will happen if the read did not read a sufficient number of bytes.
I suggest you replace this inefficient and hard-to-debug while loop
while (! futureResult.isDone()) {}
with something like
int bytes_read = futureResult.get();
if(bytes_read != 32) {
// log error or throw exception or retry ...
}
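Applied to the Listener above, call() could look roughly like this (a sketch following that suggestion, not the author's exact code):
public Pair<Integer, Integer> call() throws Exception {
ByteBuffer buffer = ByteBuffer.allocate(32);
int bytesRead = clientChannel.read(buffer).get(); // blocks until the read completes
if (bytesRead != 32) {
throw new IOException("Expected 32 bytes but read " + bytesRead);
}
buffer.flip();
return new Pair<>(buffer.getInt(0), buffer.getInt(4));
}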
I am trying to spawn off a handful of threads and place them in a List as they execute. As they complete their processing I would like to collect their results for presentation. That way I can have a list containing many threads and then once they become available I can call future.get and use their callback information.
For some reason, I am missing many of the results. When I step through the code, f.get() is being passed over when it shouldn't be and I cannot figure out why.
My code is as follows:
public class ThreadTesterRunner {
static List<Integer> randoms = new ArrayList<>();
public static void main(String[] args) throws InterruptedException {
final Phaser cb = new Phaser();
ThreadRunner tr = new ThreadRunner(cb);
Thread t = new Thread(tr, "Thread Runner");
t.start();
boolean process = true;
// wait until all threads process, then print reports
while (process){
if(tr.isFinished()){
System.out.println("Print metrics");
process = false;
}
Thread.sleep(1000);
}
}
}
class ThreadRunner implements Runnable {
private ExecutorService executorService = Executors.newFixedThreadPool(10);
private final Phaser barrier;
private boolean finished=false;
public ThreadRunner(Phaser phaser) {this.barrier = phaser;}
public void run(){
try {
List<Future<Integer>> list = new ArrayList<>();
boolean stillLoop = true; int i = 0;
final Phaser p = this.barrier;
Callable<Integer> task = new Callable<Integer>() {
public Integer call() throws Exception {
return new Reader().doRun(p);
}
};
List<Integer> randoms = new ArrayList<>();
Integer size;
while (stillLoop){
System.out.println("i "+i);
list.add(executorService.submit(task));
for(Future<Integer> f: list){
if(f.isDone()){
size = f.get();
System.out.println("size "+size);
list.remove(f);
} else {
// keep processing
}
}
if(i == 2){
System.out.println("breaking out of loop");
stillLoop = false;
}
i++;
}
this.barrier.awaitAdvance(0);
this.finished=true;
} catch (Exception e1) {
e1.printStackTrace();
}
}
public boolean isFinished(){
return this.finished;
}
}
class Reader {
private Phaser readBarrier;
private ExecutorService executorService = Executors.newFixedThreadPool(20);
public Reader() {
}
Random randomGenerator = new Random();
public Integer doRun(Phaser phaser) throws Exception {
phaser.register();
this.readBarrier = phaser;
System.out.println("Reading...");
int i;
int r = randomGenerator.nextInt(100);
System.out.println("r "+r);
ThreadTesterRunner.randoms.add(r);
int a = this.readBarrier.arrive();
return r; //i;
}
}
Any ideas as to why this may be happening?
EDIT:
Alright, I think I have it up and running:
class ThreadRunner implements Runnable {
// static int timeOutTime = 2;
private ExecutorService executorService = Executors.newFixedThreadPool(10);
private final Phaser barrier;
private boolean finished = false;
public ThreadRunner(Phaser phaser) {
this.barrier = phaser;
}
public void run() {
try {
List<Future<Integer>> list = new CopyOnWriteArrayList<>();
boolean stillLoop = true;
int i = 0;
final Phaser p = this.barrier;
Callable<Integer> readerTask = new Callable<Integer>() {
public Integer call() throws Exception {
return new Reader().doRun(p);
}
};
List<Integer> randoms = new ArrayList<>();
Integer size;
while (stillLoop) {
if (i <= 2) {
list.add(executorService.submit(readerTask));
}
if (!list.isEmpty()) {
for (Future<Integer> f : list) {
if (f.isDone()) {
size = f.get();
randoms.add(size);
System.out.println("Process read with a size of "+ size);
list.remove(f);
} else {
// System.out.println("skipping");
}
}
} else {
stillLoop = false;
}
i++;
}
System.out.println("at barrier waiting");
this.barrier.awaitAdvance(0);
System.out.println("barrier crossed");
this.finished = true;
} catch (Exception e1) {
e1.printStackTrace();
}
}
public boolean isFinished() {
return this.finished;
}
}
Results:
i 0
i 1
i 2
breaking out of loop
Reading...
Reading...
r 13
r 44
Reading...
r 78
Print metrics
I changed the ArrayList to a Vector, since ArrayList is not thread-safe, which would eventually cause a ConcurrentModificationException.
Is the above output what you would expect?
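As a side note on the thread-safety point above, here is a minimal sketch (not part of the original post) of why the updated code's CopyOnWriteArrayList tolerates removing elements while iterating, where a plain ArrayList iterator would throw ConcurrentModificationException:
List<Future<Integer>> list = new CopyOnWriteArrayList<>();
// ... futures are added elsewhere ...
for (Future<Integer> f : list) {
if (f.isDone()) {
list.remove(f); // safe: the iterator works on a snapshot of the list
}
}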
What should I use to get semantics equivalent to AutoResetEvent in Java?
(See this question for ManualResetEvent).
@user249654's answer looked promising. I added some unit tests to verify it, and indeed it works as expected.
I also added an overload of waitOne that takes a timeout.
The code is here in case anyone else finds it useful:
Unit Test
import org.junit.Assert;
import org.junit.Test;
import static java.lang.System.currentTimeMillis;
/**
* @author Drew Noakes http://drewnoakes.com
*/
public class AutoResetEventTest
{
@Test
public void synchronisesProperly() throws InterruptedException
{
final AutoResetEvent event1 = new AutoResetEvent(false);
final AutoResetEvent event2 = new AutoResetEvent(false);
final int loopCount = 10;
final int sleepMillis = 50;
Thread thread1 = new Thread(new Runnable()
{
@Override
public void run()
{
try {
for (int i = 0; i < loopCount; i++)
{
long t = currentTimeMillis();
event1.waitOne();
Assert.assertTrue("Time to wait should be within 5ms of sleep time",
Math.abs(currentTimeMillis() - t - sleepMillis) < 5);
Thread.sleep(sleepMillis);
t = currentTimeMillis();
event2.set();
Assert.assertTrue("Time to set should be within 1ms", currentTimeMillis() - t <= 1);
}
} catch (InterruptedException e) {
Assert.fail();
}
}
});
Thread thread2 = new Thread(new Runnable()
{
@Override
public void run()
{
try {
for (int i = 0; i < loopCount; i++)
{
Thread.sleep(sleepMillis);
long t = currentTimeMillis();
event1.set();
Assert.assertTrue("Time to set should be within 1ms", currentTimeMillis() - t <= 1);
t = currentTimeMillis();
event2.waitOne();
Assert.assertTrue("Time to wait should be within 5ms of sleep time",
Math.abs(currentTimeMillis() - t - sleepMillis) < 5);
}
} catch (InterruptedException e) {
Assert.fail();
}
}
});
long t = currentTimeMillis();
thread1.start();
thread2.start();
int maxTimeMillis = loopCount * sleepMillis * 2 * 2;
thread1.join(maxTimeMillis);
thread2.join(maxTimeMillis);
Assert.assertTrue("Thread should not be blocked.", currentTimeMillis() - t < maxTimeMillis);
}
@Test
public void timeout() throws InterruptedException
{
AutoResetEvent event = new AutoResetEvent(false);
int timeoutMillis = 100;
long t = currentTimeMillis();
event.waitOne(timeoutMillis);
long took = currentTimeMillis() - t;
Assert.assertTrue("Timeout should have occurred, taking within 5ms of the timeout period, but took " + took,
Math.abs(took - timeoutMillis) < 5);
}
@Test
public void noBlockIfInitiallyOpen() throws InterruptedException
{
AutoResetEvent event = new AutoResetEvent(true);
long t = currentTimeMillis();
event.waitOne(200);
Assert.assertTrue("Should not have taken very long to wait when already open",
Math.abs(currentTimeMillis() - t) < 5);
}
}
AutoResetEvent with an overload that accepts a timeout
public class AutoResetEvent
{
private final Object _monitor = new Object();
private volatile boolean _isOpen = false;
public AutoResetEvent(boolean open)
{
_isOpen = open;
}
public void waitOne() throws InterruptedException
{
synchronized (_monitor) {
while (!_isOpen) {
_monitor.wait();
}
_isOpen = false;
}
}
public void waitOne(long timeout) throws InterruptedException
{
synchronized (_monitor) {
long t = System.currentTimeMillis();
while (!_isOpen) {
_monitor.wait(timeout);
// Check for timeout
if (System.currentTimeMillis() - t >= timeout)
break;
}
_isOpen = false;
}
}
public void set()
{
synchronized (_monitor) {
_isOpen = true;
_monitor.notify();
}
}
public void reset()
{
_isOpen = false;
}
}
class AutoResetEvent {
private final Object monitor = new Object();
private volatile boolean open = false;
public AutoResetEvent(boolean open) {
this.open = open;
}
public void waitOne() throws InterruptedException {
synchronized (monitor) {
while (open == false) {
monitor.wait();
}
open = false; // close for other
}
}
public void set() {
synchronized (monitor) {
open = true;
monitor.notify(); // open one
}
}
public void reset() { // close the event
open = false;
}
}
I was able to get CyclicBarrier to work for my purposes.
Here is the C# code I was trying to reproduce in Java (it's just a demonstration program I wrote to isolate the paradigm; I now use this pattern in C# programs that generate video in real time, to provide accurate control of the frame rate):
using System;
using System.Timers;
using System.Threading;
namespace TimerTest
{
class Program
{
static AutoResetEvent are = new AutoResetEvent(false);
static void Main(string[] args)
{
System.Timers.Timer t = new System.Timers.Timer(1000);
t.Elapsed += new ElapsedEventHandler(delegate { are.Set(); });
t.Enabled = true;
while (true)
{
are.WaitOne();
Console.WriteLine("main");
}
}
}
}
and here is the Java code I came up with to do the same thing (using the CyclicBarrier class as suggested in a previous answer):
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.CyclicBarrier;
public class TimerTest2 {
static CyclicBarrier cb;
static class MyTimerTask extends TimerTask {
private CyclicBarrier cb;
public MyTimerTask(CyclicBarrier c) { cb = c; }
public void run() {
try { cb.await(); }
catch (Exception e) { }
}
}
public static void main(String[] args) {
cb = new CyclicBarrier(2);
Timer t = new Timer();
t.schedule(new MyTimerTask(cb), 1000, 1000);
while (true) {
try { cb.await(); }
catch (Exception e) { }
System.out.println("main");
}
}
}
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
public class AutoResetEvent {
private volatile boolean _signaled;
private ReentrantLock _lock;
private Condition _condition;
public AutoResetEvent(boolean initialState) {
_signaled = initialState;
_lock = new ReentrantLock();
_condition = _lock.newCondition();
}
public void waitOne(long milliSeconds) throws InterruptedException {
_lock.lock();
try {
while (!_signaled)
_condition.await(milliSeconds, TimeUnit.MILLISECONDS);
_signaled = false;
} finally {
_lock.unlock();
}
}
public void waitOne() throws InterruptedException {
_lock.lock();
try {
while (!_signaled)
_condition.await();
_signaled = false;
} finally {
_lock.unlock();
}
}
public void set() {
_lock.lock();
try {
_condition.signal();
_signaled = true;
} finally {
_lock.unlock();
}
}
public void reset() {
_lock.lock();
try {
_signaled = false;
} finally {
_lock.unlock();
}
}
}
One more extension to the solution from the accepted answer, in case you would like to know whether your wait finished due to a timeout or because the event was set (which is exactly what .NET's AutoResetEvent does).
public boolean waitOne(long timeout) throws InterruptedException {
synchronized (monitor) {
try {
long t = System.currentTimeMillis();
while (!isOpen) {
monitor.wait(timeout);
// Check for timeout
if (System.currentTimeMillis() - t >= timeout)
break;
}
return isOpen;
}
finally {
isOpen = false;
}
}
}
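Assuming an AutoResetEvent instance named event that exposes this boolean-returning overload, a hypothetical usage sketch (the timeout value is arbitrary):
boolean signaled = event.waitOne(1000); // true if set() was called, false if the 1000 ms timeout elapsed
if (!signaled) {
// handle the timeout case
}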
I believe what you're looking for is either a CyclicBarrier or a CountDownLatch.
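For reference, a minimal CountDownLatch sketch for a one-shot signal (note that, unlike AutoResetEvent, a latch cannot be reset, so it only covers the single-use case):
CountDownLatch latch = new CountDownLatch(1);
new Thread(() -> {
// ... do the work ...
latch.countDown(); // signal completion
}).start();
latch.await(); // blocks until countDown() has been called (throws InterruptedException, handling omitted)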