ThreadPoolTaskExecutor throws RejectedExecutionException - java

I want to implement following behaviour:
Read n events from file
Process them in n threads
Go back to step 1 if any events remain
I wrote following app to test solution but it fails in random moment, eg.
java.lang.IllegalStateException: Failed to execute CommandLineRunner
Caused by: org.springframework.core.task.TaskRejectedException: Executor [java.util.concurrent.ThreadPoolExecutor#62b3df3a[Running, pool size = 5, active threads = 4, queued tasks = 0, completed tasks = 70]] did not accept task: java.util.concurrent.CompletableFuture$AsyncSupply#71ea1fda
What queue capacity should I set if I don't want to put events in queue? I want to process them immediately.
I am using Open JDK 11 and Spring Boot 2.2.2.RELEASE
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
#SpringBootApplication
public class DemoApplication implements CommandLineRunner {
#Autowired
private ThreadPoolTaskExecutor eventExecutor;
public static void main(String[] args) {
SpringApplication.run(DemoApplication.class, args);
}
#Bean(name = "eventExecutor")
public ThreadPoolTaskExecutor eventExecutor() {
ThreadPoolTaskExecutor pool = new ThreadPoolTaskExecutor();
pool.setCorePoolSize(5);
pool.setMaxPoolSize(5);
pool.setQueueCapacity(0);
pool.setAwaitTerminationSeconds(0);
pool.initialize();
return pool;
}
#Override
public void run(String... args) {
System.out.println("Start events processing");
long start = System.currentTimeMillis();
int result = 0;
for (int i = 0; i < 100; i++) {
List<CompletableFuture<Integer>> completableFutures = getEvents(5).stream()
.map(event -> CompletableFuture.supplyAsync(() -> processEvent(event), eventExecutor))
.collect(Collectors.toList());
result += completableFutures.stream()
.mapToInt(CompletableFuture::join)
.sum();
}
long timeMillis = System.currentTimeMillis() - start;
System.out.println("Took " + timeMillis + "ms, " + result);
}
private List<Event> getEvents(int n) {
List<Event> events = new ArrayList<>();
for (int i = 1; i <= n; i++) {
events.add(new Event(i));
}
return events;
}
private int processEvent(Event event) {
System.out.println("processing event " + event.id);
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("processing event " + event.id + " finished");
return 1;
}
private static class Event {
private int id;
private Event(int id) {
this.id = id;
}
}
}

I'll suggest to change pool.setQueueCapacity(0) to use a positive value to allow for tasks to be queued up for processing when there are no threads available in the so-configured fixed-size pool (corePoolSize == maxPoolSize == 5).
The output "pool size = 5, active threads = 4" shows an approximate number of active threads.
In theory it could happen that threads are not returned back into the pool before an attempt to process new batch of events.

Related

ForkJoinPool result never arrived

Hi, I am new to Java concurrency. I am trying to double the list's contents using fork/join by dividing the task into multiple parts.
The task gets Completed but result never arrived.
package com.learning;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.TimeUnit;
/**
 * Doubles every element of the given list in place and returns the same list.
 *
 * <p>Note: despite extending {@link RecursiveTask}, this task never splits
 * itself — the whole list is processed sequentially by whichever worker
 * thread runs {@link #compute()}. The caller is responsible for partitioning
 * work across several DoubleNumbers instances.
 */
class DoubleNumbers extends RecursiveTask<List<Integer>> {

    private final List<Integer> listToDo;

    public DoubleNumbers(List<Integer> list) {
        System.out.println("Cons Called"+list.get(0));
        this.listToDo = list;
    }

    @Override
    protected List<Integer> compute() {
        // FIX: removed the unused local "List<DoubleNumbers> doubleNumbersList"
        // that the original allocated and never read.
        System.out.println(Thread.currentThread().toString());
        for (int i = 0; i < listToDo.size(); i++) {
            listToDo.set(i, listToDo.get(i) * 2);
        }
        return listToDo;
    }
}
public class FJPExample {
    public static void main(String[] args) {
        List<Integer> arrayList = new ArrayList<>();
        for (int i = 0; i < 149; i++) {
            arrayList.add(i, i);
        }
        ForkJoinPool forkJoinPool = new ForkJoinPool(4);
        System.out.println(forkJoinPool.getParallelism());
        // subList(...) returns views backed by arrayList, so the tasks double
        // arrayList's own elements in place.
        DoubleNumbers doubleNumbers = new DoubleNumbers(arrayList.subList(0, 49));
        DoubleNumbers doubleNumbers50ToNext = new DoubleNumbers(arrayList.subList(50, 99));
        DoubleNumbers doubleNumbers100ToNext = new DoubleNumbers(arrayList.subList(100, 149));
        forkJoinPool.execute(doubleNumbers);
        forkJoinPool.execute(doubleNumbers50ToNext);
        forkJoinPool.execute(doubleNumbers100ToNext);
        // BUG FIX 1: join() already blocks until each task is done, so the
        // original polling loop (which slept 1000 *seconds* per iteration via
        // TimeUnit.SECONDS.sleep(1000)) is unnecessary and removed.
        // BUG FIX 2: do NOT arrayList.addAll(task.join()) — the results are
        // sublist views of arrayList itself, and structurally modifying the
        // backing list while appending its own views throws
        // ConcurrentModificationException. The doubling already happened
        // in place; just wait for completion.
        doubleNumbers.join();
        doubleNumbers50ToNext.join();
        doubleNumbers100ToNext.join();
        forkJoinPool.shutdown();
        System.out.println(arrayList.size());
        arrayList.forEach(System.out::println);
    }
}
If I debug my task then I am able to find the numbers gets doubled but the result never arrived at line no 56
The issue is with the code arrayList.addAll(doubleNumbers.join()) at lines 54, 55 and 56, because it may result in a ConcurrentModificationException. Replace those lines with the lines below and it will work (it works because the sublists created with arrayList.subList at line 36 are views backed by the same ArrayList — read its Javadoc for more info):
doubleNumbers.join();
doubleNumbers50ToNext.join();
doubleNumbers100ToNext.join();

Streams performance difference

Performance difference on 2 different streams executions
I try to do the same operation for default parallel stream and using custom ForkJoin pool.
I see huge performance difference for the same operation.
94 ms vs ~5341 ms (Time1 and Time2 are almost the same - so I don't blame awaitQuiescence here)
What can be a reason ? Tricky java intrinsic ?
/**
 * Holder for a single application-wide {@link ForkJoinPool} whose worker
 * threads carry a recognizable name ("ForkJoin-Executor-&lt;index&gt;") and whose
 * uncaught exceptions are routed to the logger. Parallelism matches the
 * common pool's parallelism; async mode (FIFO) is enabled.
 */
public final class SharedForkJoinExecutor {

    private static final Logger LOGGER = LoggerFactory.getLogger(SharedForkJoinExecutor.class);

    private static final ForkJoinPool EXEC;

    static {
        final int parallelism = ForkJoinPool.commonPool().getParallelism();
        EXEC = new ForkJoinPool(
                parallelism,
                pool -> {
                    final ForkJoinWorkerThread worker =
                            ForkJoinPool.defaultForkJoinWorkerThreadFactory.newThread(pool);
                    worker.setName("ForkJoin-Executor-" + worker.getPoolIndex());
                    return worker;
                },
                (thread, error) -> LOGGER.info(error.getMessage(), error),
                true);
    }

    /**
     * Shuts down this executor
     */
    public static void shutdown() {
        EXEC.shutdown();
    }

    /** Returns the shared pool instance. */
    public static ForkJoinPool get() {
        return EXEC;
    }
}
package com.stream;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;
import com.stream.SharedForkJoinExecutor;
import org.junit.Test;
import static junit.framework.TestCase.assertEquals;
public class ForkJoinTest {
private static final int INT_NUMBERS = 1_000_000;
#Test
public void forEachIntTest() {
final AtomicInteger aEvenCounter = new AtomicInteger(0);
final AtomicInteger aAllCounter = new AtomicInteger(0);
long t = System.currentTimeMillis();
IntStream.range(0, INT_NUMBERS).parallel().forEach(theIndex -> {
if (theIndex % 2 == 0) {
aEvenCounter.incrementAndGet();
}
aAllCounter.incrementAndGet();
});
System.out.println("Time=" + (System.currentTimeMillis() - t));
assertEquals(INT_NUMBERS / 2, aEvenCounter.get());
assertEquals(INT_NUMBERS, aAllCounter.get());
aEvenCounter.set(0);
aAllCounter.set(0);
t = System.currentTimeMillis();
SharedForkJoinExecutor.get().execute(() -> IntStream.range(0, INT_NUMBERS).parallel().forEach(theIndex -> {
if (theIndex % 2 == 0) {
aEvenCounter.incrementAndGet();
}
aAllCounter.incrementAndGet();
}));
System.out.println("Time1=" + (System.currentTimeMillis() - t));
SharedForkJoinExecutor.get().awaitQuiescence(10, TimeUnit.HOURS);
System.out.println("Time2=" + (System.currentTimeMillis() - t));
assertEquals(INT_NUMBERS / 2, aEvenCounter.get());
assertEquals(INT_NUMBERS, aAllCounter.get());
}
}

Can we store java.util.concurrent.Future values in a Global variable? Is that correct way?

My TestNG test1 calls executor.submit(callable); the responsibility of that callable is to trigger an action. I store the resulting futures in a List<Future<T>> that I have declared as a global variable. My second TestNG test picks that list up, calls Future.get() on each entry, and displays the result.
can anyone help me!
import com.google.common.collect.Lists;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
public class test1 {
public static final List<Future<Boolean>> callBackList = new ArrayList<>();
#DataProvider(name = "HappyTestCases")
protected Iterator<Object[]> getHappyTestCases(){
List<Integer> inputTestDataIntValue = new ArrayList<>();
inputTestDataIntValue.add(1);
inputTestDataIntValue.add(2);
inputTestDataIntValue.add(3);
inputTestDataIntValue.add(4);
inputTestDataIntValue.add(5);
inputTestDataIntValue.add(6);
inputTestDataIntValue.add(7);
inputTestDataIntValue.add(8);
inputTestDataIntValue.add(9);
inputTestDataIntValue.add(10);
List<Object[]> testCasesAsObjectArray = inputTestDataIntValue.stream().map(testCaseObj -> new Object[]{testCaseObj})
.collect(Collectors.toList());
return testCasesAsObjectArray.iterator();
}
#DataProvider(name = "callBackCases")
protected Iterator<Object[]> getcallBackCases(){
List<Object[]> testCasesAsObjectArray = callBackList.stream().map(testCaseObj -> new Object[]{testCaseObj})
.collect(Collectors.toList());
return testCasesAsObjectArray.iterator();
}
#org.testng.annotations.Test(dataProvider = "HappyTestCases")
public void runTestNgTest(Integer data) throws Exception {
RunInitiator(Arrays.asList(data));
}
#Test(dataProvider = "HappyTestCases")
public void runCallBackList(Future<Boolean> callback) throws Exception{
List<Boolean> reslutList = runCallBack(Arrays.asList(callback));
for(Boolean b : reslutList) {
if(b) {
System.out.println("TestCase passed for testCase Id: ");
}else {
System.out.println("TestCase failed for testCase Id: ");
}
}
}
public void RunInitiator(List<Integer> inputTestDataIntValue) throws Exception {
ExecutorService executor = Executors.newFixedThreadPool(10);
for(Integer integer : inputTestDataIntValue) {
System.out.println("intValue = " + integer);
System.out.println("executor is going to start");
Future<Boolean> callBack = executor.submit(new Task(integer));
System.out.println("Thread Name = "+ Thread.currentThread().getName());
callBackList.add(callBack);
}
System.out.println("successfully finished runIntiator");
}
public List<Boolean> runCallBack(List<Future<Boolean>> callBackList) throws Exception{
List<Boolean> resultList = Lists.newArrayList();
//iterate and give it to different test.
for(Future<Boolean> callBack : callBackList) {
Boolean result = callBack.get();
resultList.add(result);
System.out.println("result = "+ result);
}
return resultList;
}
static class Task implements Callable<Boolean> {
public Integer intValue;
public Task(int intValue) {
this.intValue = intValue;
}
#Override
public Boolean call() throws Exception {
System.out.println("Thread Name inside callable = " + Thread.currentThread().getName());
System.out.println("Sleeping for 2mins");
Thread.sleep(120000);
if (intValue < 5) {
return true;
} else {
return false;
}
}
}
}

Comparing Fork And Join with Single threaded program

I am trying to get started with the Fork-Join framework for a smaller task. As I start-up example I tried copying mp3 files
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
/**
 * Copies every .mp3 under a source directory into a flat target directory,
 * once via fork/join and once sequentially, printing the elapsed time of each.
 *
 * <p>NOTE(review): both directories are hard-coded Windows paths; the
 * benchmark is I/O-bound, so little speed-up from parallelism is expected.
 */
public class DeepFileCopier extends RecursiveTask<String> {

    private static final long serialVersionUID = 1L;

    private static Path startingDir = Paths.get("D:\\larsen\\Music\\");
    private static List<Path> listOfPaths = new ArrayList<>();

    // Half-open index range [start, end) into listOfPaths handled by this task.
    private int start, end;

    public static void main(String[] args) throws IOException {
        long startMillis = System.currentTimeMillis();
        Files.walkFileTree(startingDir, new CustomFileVisitor());
        final DeepFileCopier deepFileCopier = new DeepFileCopier(0, listOfPaths.size());
        final ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
        pool.invoke(deepFileCopier);
        System.out.println("With Fork-Join " + (System.currentTimeMillis() - startMillis));
        long secondStartMillis = System.currentTimeMillis();
        // Reuse the task as a plain object for the sequential pass — this is a
        // direct method call, not a ForkJoinTask re-submission.
        deepFileCopier.start = 0;
        deepFileCopier.end = listOfPaths.size();
        deepFileCopier.computeDirectly();
        System.out.println("Without Fork-Join " + (System.currentTimeMillis() - secondStartMillis));
    }

    /** Collects every .mp3 file found under the starting directory. */
    private static class CustomFileVisitor extends SimpleFileVisitor<Path> {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            if (file.toString().endsWith(".mp3")) {
                listOfPaths.add(file);
            }
            return FileVisitResult.CONTINUE;
        }
    }

    @Override
    protected String compute() {
        int length = end - start;
        if (length < 4) {
            return computeDirectly();
        }
        int split = length / 2;
        final DeepFileCopier firstHalfCopier = new DeepFileCopier(start, start + split);
        firstHalfCopier.fork();
        final DeepFileCopier secondHalfCopier = new DeepFileCopier(start + split, end);
        // Run the second half on this thread, then wait for the forked half.
        secondHalfCopier.compute();
        firstHalfCopier.join();
        return null;
    }

    /** Sequentially copies the files with indexes [start, end). */
    private String computeDirectly() {
        for (int index = start; index < end; index++) {
            Path currentFile = listOfPaths.get(index);
            System.out.println("Copying :: " + currentFile.getFileName());
            Path targetDir = Paths.get("D:\\Fork-Join Test\\" + currentFile.getFileName());
            try {
                // BUG FIX: create the target directory if it does not exist;
                // otherwise every copy fails with NoSuchFileException on a
                // machine where "D:\Fork-Join Test" is absent.
                Files.createDirectories(targetDir.getParent());
                Files.copy(currentFile, targetDir, StandardCopyOption.REPLACE_EXISTING);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return null;
    }

    private DeepFileCopier(int start, int end) {
        this.start = start;
        this.end = end;
    }
}
On comparing the performance I noticed -
With Fork-Join 149714
Without Fork-Join 146590
I am working on a dual-core machine. I was expecting a 50% reduction in the work time, but the portion with Fork-Join takes 3 seconds more than the single-threaded approach. Please let me know if something is incorrect.
Your problem is not well suited to benefit from multithreading on normal systems. The execution time is spent copying all the files. But this is limited by your hard drive that will process the files in sequence.
If you run a more CPU intense task, you should note a difference. For test purposes you could try the following:
// CPU-bound replacement body for computeDirectly(): burns cycles so the
// fork/join split actually shows a speed-up over the sequential pass.
private String computeDirectly() {
    // BUG FIX: the original declared "Integer nonsense;" without initializing
    // it, which does not compile ("variable might not have been initialized").
    // A primitive int also avoids an autobox on every += in the hot loop;
    // overflow simply wraps, which is fine for a nonsense workload.
    int nonsense = 0;
    for (int index = start; index < end; index++) {
        for (int j = 0; j < 1000000; j++) {
            nonsense += index * j;
        }
    }
    return Integer.toString(nonsense);
}
On my system (i5-2410M) this will print:
With Fork-Join 2628
Without Fork-Join 6421

Multithreaded test to test response time of sites/web services

Below code tests the response time of reading www.google.com into a BufferedReader. I plan on using this code to test the response times of other sites and web services within intranet. Below tests runs for 20 seconds and opens 4 requests per second :
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
public class ResponseTimeTest {
private static final int NUMBER_REQUESTS_PER_SECOND = 4;
private static final int TEST_EXECUTION_TIME = 20000;
private static final ConcurrentHashMap<Long, Long> timingMap = new ConcurrentHashMap<Long, Long>();
#Test
public void testResponseTime() throws InterruptedException {
ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(10);
scheduler.scheduleAtFixedRate(new RequestThreadCreator(), 0, 1, TimeUnit.SECONDS);
Thread.sleep(TEST_EXECUTION_TIME);
System.out.println("Start Time, End Time, Total Time");
for (Entry<Long, Long> entry : timingMap.entrySet())
{
System.out.println(entry.getKey() + "," + entry.getValue() +","+(entry.getValue() - entry.getKey()));
}
}
private final class RequestThreadCreator implements Runnable {
public void run() {
ExecutorService es = Executors.newCachedThreadPool();
for (int i = 1; i <= NUMBER_REQUESTS_PER_SECOND; i++) {
es.execute(new RequestThread());
}
es.shutdown();
}
}
private final class RequestThread implements Runnable {
public void run() {
long startTime = System.currentTimeMillis();
try {
URL oracle = new URL("http://www.google.com/");
URLConnection yc = oracle.openConnection();
BufferedReader in = new BufferedReader(new InputStreamReader(yc.getInputStream()));
while ((in.readLine()) != null) {
}
in.close();
} catch (Exception e) {
e.printStackTrace();
}
long endTime = System.currentTimeMillis();
timingMap.put(startTime, endTime);
}
}
}
The output is :
Start Time, End Time, Total Time
1417692221531,1417692221956,425
1417692213530,1417692213869,339
1417692224530,1417692224983,453
1417692210534,1417692210899,365
1417692214530,1417692214957,427
1417692220530,1417692221041,511
1417692209530,1417692209949,419
1417692215532,1417692215950,418
1417692214533,1417692215075,542
1417692213531,1417692213897,366
1417692212530,1417692212924,394
1417692219530,1417692219897,367
1417692226532,1417692226876,344
1417692211530,1417692211955,425
1417692209529,1417692209987,458
1417692222531,1417692222967,436
1417692215533,1417692215904,371
1417692219531,1417692219954,423
1417692215530,1417692215870,340
1417692217531,1417692218035,504
1417692207547,1417692207882,335
1417692208535,1417692208898,363
1417692207544,1417692208095,551
1417692208537,1417692208958,421
1417692226533,1417692226899,366
1417692224531,1417692224951,420
1417692225529,1417692225957,428
1417692216530,1417692216963,433
1417692223541,1417692223884,343
1417692223546,1417692223959,413
1417692222530,1417692222954,424
1417692208532,1417692208871,339
1417692207536,1417692207988,452
1417692226538,1417692226955,417
1417692220531,1417692220992,461
1417692209531,1417692209953,422
1417692226531,1417692226959,428
1417692217532,1417692217944,412
1417692210533,1417692210964,431
1417692221530,1417692221870,340
1417692216531,1417692216959,428
1417692207535,1417692208021,486
1417692223548,1417692223957,409
1417692216532,1417692216904,372
1417692214535,1417692215071,536
1417692217530,1417692217835,305
1417692213529,1417692213954,425
1417692210531,1417692210964,433
1417692212529,1417692212993,464
1417692213532,1417692213954,422
1417692215531,1417692215957,426
1417692210529,1417692210868,339
1417692218531,1417692219102,571
1417692225530,1417692225907,377
1417692208536,1417692208966,430
1417692218533,1417692219168,635
As System.out.println is synchronized, in order not to skew the results I add the timings to a ConcurrentHashMap rather than printing them within the RequestThread itself. Are there other gotchas I should be aware of in the above code so as not to skew the results? Are there areas I should concentrate on to improve the accuracy, or is it already accurate enough — by "enough" I mean accurate to approximately 100 milliseconds?

Categories

Resources