I am building an implementation of Spring Integration with two PollableChannels:
Regular channel
Error channel
Messages are polled from the regular channel and processed. If there is an error during processing (e.g., an external service is unavailable), the message is sent into the error channel. From the error channel it is re-queued onto the regular channel, and the cycle continues until the message is successfully processed.
The idea is to poll the error channel infrequently, to give the processor some time to (hopefully) recover.
I have simulated this workflow in the following test:
package com.stackoverflow.questions.sipoller;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.integration.annotation.MessageEndpoint;
import org.springframework.integration.annotation.Poller;
import org.springframework.integration.annotation.Router;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.config.EnableIntegration;
import org.springframework.integration.dsl.MessageChannels;
import org.springframework.messaging.Message;
import org.springframework.messaging.PollableChannel;
import org.springframework.messaging.support.MessageBuilder;
import static org.awaitility.Awaitility.await;
import static org.awaitility.Durations.FIVE_MINUTES;
import static org.awaitility.Durations.ONE_HUNDRED_MILLISECONDS;
#SpringBootTest
class SiPollerApplicationTests {
private final static Logger LOG = LoggerFactory.getLogger(SiPollerApplicationTests.class);
private final static String QUEUE_CHANNEL_REGULAR = "queueChannelRegular";
private final static String QUEUE_CHANNEL_ERROR = "queueChannelError";
private final static String POLLER_PERIOD_REGULAR = "500"; // 0.5 second
private final static String POLLER_PERIOD_ERROR = "3000"; // 3 seconds
private final static AtomicInteger NUMBER_OF_ATTEMPTS = new AtomicInteger();
private final static AtomicInteger NUMBER_OF_SUCCESSES = new AtomicInteger();
private final static List<Instant> ATTEMPT_INSTANTS = Collections.synchronizedList(new ArrayList<>());
#Autowired
#Qualifier(QUEUE_CHANNEL_REGULAR)
private PollableChannel channelRegular;
#Test
void testTimingOfMessageProcessing() {
channelRegular.send(MessageBuilder.withPayload("Test message").build());
await()
.atMost(FIVE_MINUTES)
.with()
.pollInterval(ONE_HUNDRED_MILLISECONDS)
.until(
() -> {
if (NUMBER_OF_SUCCESSES.intValue() == 1) {
reportGaps();
return true;
}
return false;
}
);
}
private void reportGaps() {
List<Long> gaps = IntStream
.range(1, ATTEMPT_INSTANTS.size())
.mapToObj(
i -> Duration
.between(
ATTEMPT_INSTANTS.get(i - 1),
ATTEMPT_INSTANTS.get(i)
)
.toMillis()
)
.collect(Collectors.toList());
LOG.info("Gaps between attempts (in ms): {}", gaps);
}
#Configuration
#EnableIntegration
#Import(SiPollerApplicationTestEndpoint.class)
static class SiPollerApplicationTestConfig {
#Bean(name = QUEUE_CHANNEL_REGULAR)
public PollableChannel queueChannelRegular() {
return MessageChannels.queue(QUEUE_CHANNEL_REGULAR).get();
}
#Bean(name = QUEUE_CHANNEL_ERROR)
public PollableChannel queueChannelError() {
return MessageChannels.queue(QUEUE_CHANNEL_ERROR).get();
}
#Router(
inputChannel = QUEUE_CHANNEL_ERROR,
poller = #Poller(fixedRate = POLLER_PERIOD_ERROR)
)
public String retryProcessing() {
return QUEUE_CHANNEL_REGULAR;
}
}
#MessageEndpoint
static class SiPollerApplicationTestEndpoint {
#Autowired
#Qualifier(QUEUE_CHANNEL_ERROR)
private PollableChannel channelError;
#ServiceActivator(
inputChannel = QUEUE_CHANNEL_REGULAR,
poller = #Poller(fixedRate = POLLER_PERIOD_REGULAR)
)
public void handleMessage(Message<String> message) {
// Count and time attempts
int numberOfAttempts = NUMBER_OF_ATTEMPTS.getAndIncrement();
ATTEMPT_INSTANTS.add(Instant.now());
// First few times - refuse to process message and bounce it into
// error channel
if (numberOfAttempts < 5) {
channelError.send(message);
return;
}
// After that - process message
NUMBER_OF_SUCCESSES.getAndIncrement();
}
}
}
The pom.xml dependencies are:
<!-- Dependencies for the Spring Integration poller test; all versions except
     awaitility are managed by the Spring Boot parent. -->
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<!-- Vintage engine excluded: the test class uses JUnit 5 (Jupiter) only. -->
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Awaitility provides the polling await()/until() DSL used by the test. -->
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>4.0.2</version>
<scope>test</scope>
</dependency>
</dependencies>
Note the configuration for Pollers:
private final static String POLLER_PERIOD_REGULAR = "500"; // 0.5 second
private final static String POLLER_PERIOD_ERROR = "3000"; // 3 seconds
The regular channel is supposed to be polled once in half a second, and the error channel — once in three seconds.
The test simulates outages during message processing: the first five attempts to process the message are rejected. Also, the test records the Instant of every processing attempt. In the end, on my machine, the test outputs:
Gaps between attempts (in ms): [1, 0, 0, 0, 0]
In other words, the message is re-tried almost immediately after each failure.
It seems to me that I fundamentally misunderstand how Pollers work in Spring Integration. So my questions are:
Why is there such a dissonance between the poller configuration and the actual frequency of polling.
Does Spring Integration provide a way to implement the pattern I have described?
There are two settings that can affect this behavior.
QueueChannel pollers will drain the queue by default; setMaxMessagesPerPoll(1) to only receive one message each poll.
Also, by default, the QueueChannel default timeout is 1 second (1000ms).
So the first poll may be sooner than you think; set it to 0 to immediately exit if there are no messages present in the queue.
Related
I have the following code:
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.json.JsonMapper;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import picocli.CommandLine;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Stream;
#CommandLine.Command(name = "red-alert-listener",
mixinStandardHelpOptions = true,
versionProvider = Listener.class,
showDefaultValues = true,
description = "An App that can get \"red alert\"s from IDF's Home Front Command.")
public class Listener implements Runnable, CommandLine.IVersionProvider
{
private static final Logger LOGGER = LogManager.getLogger();
private static final ObjectMapper JSON_MAPPER = new JsonMapper()
.enable(SerializationFeature.INDENT_OUTPUT)
.disable(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS)
.findAndRegisterModules();
private static final Configuration DEFAULT_CONFIGURATION = new Configuration(
false,
false,
true,
false,
Duration.ofMillis(10000),
LanguageCode.HE,
Level.INFO,
Collections.emptySet()
);
private static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient();
private Configuration configuration = DEFAULT_CONFIGURATION;
private List<String> districtsNotFound = Collections.emptyList();
public static void main(String... args)
{
System.exit(new CommandLine(Listener.class)
.setCaseInsensitiveEnumValuesAllowed(true)
.execute(args));
}
private static void sleep()
{
try
{
Thread.sleep(1000);
} catch (InterruptedException interruptedException)
{
interruptedException.printStackTrace(); // TODO think about
}
}
#Override
public String[] getVersion()
{
return new String[]{"Red Alert Listener v" + getClass().getPackage().getImplementationVersion()};
}
#Override
public void run()
{
System.err.println("Preparing Red Alert Listener v" + getClass().getPackage().getImplementationVersion() + "...");
try (Clip clip = AudioSystem.getClip(Stream.of(AudioSystem.getMixerInfo()).parallel().unordered()
.filter(mixerInfo -> "default [default]".equals(mixerInfo.getName()))
.findAny()
.orElse(null));
AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(new BufferedInputStream(Objects.requireNonNull(getClass().getResourceAsStream("/alarmSound.wav"))));
InputStream in = System.in)
{
clip.open(audioInputStream);
final URI uri = URI.create("https://www.oref.org.il/WarningMessages/alert/alerts.json");
Set<String> prevData = Collections.emptySet();
Instant currAlertsLastModified = Instant.MIN;
final int minRedAlertEventContentLength = """
{"cat":"1","data":[],"desc":"","id":0,"title":""}""".getBytes(StandardCharsets.UTF_8).length;
System.err.println("Listening...");
while (true)
try
{
final HttpResponse<InputStream> httpResponse = HTTP_CLIENT.send(
HttpRequest.newBuilder(uri)
.header("Accept", "application/json")
.header("X-Requested-With", "XMLHttpRequest")
.header("Referer", "https://www.oref.org.il/12481-" + configuration.languageCode().name().toLowerCase() + "/Pakar.aspx")
.timeout(configuration.timeout())
.build(),
HttpResponse.BodyHandlers.ofInputStream()
);
try (InputStream httpResponseBody = httpResponse.body())
{
if (httpResponse.statusCode() != HttpURLConnection.HTTP_OK/* &&
httpURLConnection.getResponseCode() != HttpURLConnection.HTTP_NOT_MODIFIED*/)
{
LOGGER.error("Connection response status code: {}", httpResponse.statusCode());
sleep();
continue;
}
final Instant alertsLastModified;
final long contentLength = httpResponse.headers().firstValueAsLong("Content-Length").orElse(-1);
if (contentLength < minRedAlertEventContentLength)
prevData = Collections.emptySet();
else if ((alertsLastModified = httpResponse.headers().firstValue("Last-Modified")
.map(lastModifiedStr -> DateTimeFormatter.RFC_1123_DATE_TIME.parse(lastModifiedStr, Instant::from))
.filter(currAlertsLastModified::isBefore)
.orElse(null)) != null)
{
currAlertsLastModified = alertsLastModified;
final RedAlertEvent redAlertEvent = JSON_MAPPER.readValue(
httpResponseBody,
RedAlertEvent.class
);
LOGGER.debug("Original event data: {}", redAlertEvent);
}
} catch (JsonParseException e)
{
LOGGER.error("JSON parsing error: {}", e.toString());
}
} catch (IOException e)
{
LOGGER.debug("Got exception: {}", e.toString());
sleep();
}
} catch (Throwable e)
{
LOGGER.fatal("Closing connection and exiting...", e);
}
}
#SuppressWarnings("unused")
private enum LanguageCode
{
HE,
EN,
AR,
RU;
}
private record RedAlertEvent(
int cat,
List<String> data,
String desc,
long id,
String title
)
{
}
private record Configuration(
boolean isMakeSound,
boolean isAlertAll,
boolean isDisplayResponse,
boolean isShowTestAlerts,
Duration timeout,
LanguageCode languageCode,
Level logLevel,
Set<String> districtsOfInterest
)
{
}
}
My dependencies:
<!-- Dependencies for the red-alert listener CLI. -->
<dependencies>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.19.0</version>
</dependency>
<!-- LMAX Disruptor: enables Log4j2 async loggers. -->
<dependency>
<groupId>com.lmax</groupId>
<artifactId>disruptor</artifactId>
<version>3.4.4</version>
</dependency>
<dependency>
<groupId>info.picocli</groupId>
<artifactId>picocli</artifactId>
<version>4.6.3</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.13.4</version>
</dependency>
<!-- jsr310 module is picked up by ObjectMapper.findAndRegisterModules(). -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>2.13.4</version>
</dependency>
<dependency>
<groupId>io.github.ashr123</groupId>
<artifactId>time-measurement</artifactId>
<version>1.0.7</version>
</dependency>
</dependencies>
I'm using OpenJDK 19 but it happens also on OpenJDK 17.
This is the most minimal code I can show you that demonstrate the problem.
When I used IntelliJ's profiler tool, here is what I saw in the live CPU and heap charts (over 3 days):
I think that my HttpClient doesn't close the subscriptions fast enough (i.e. it adds HttpBodySubscriberWrapper via jdk.internal.net.http.HttpClientImpl#registerSubscriber faster that it removes it via jdk.internal.net.http.HttpClientImpl#subscriberCompleted).
Why does it happen?
Why even though I've put the Closeable body in try-with-resources block doesn't remove the subscriber in time for the next loop?
How can I control the size of the subscriber field?
Can I demand in any way that there is only 1 "uncompleted" subscriber?
UPDATE
I've discovered that with HttpResponse.BodyHandlers#ofString it doesn't happen (the subscriber list is always of size 1), so the question is: why, if I keep the body in a try-with-resources block and close the InputStream, doesn't it remove the request's subscriber?
From HttpResponse.BodySubscribers#ofInputStream javadoc:
API Note:
To ensure that all resources associated with the corresponding exchange are properly released the caller must ensure to either read all bytes until EOF is reached, or call InputStream.close if it is unable or unwilling to do so. Calling close before exhausting the stream may cause the underlying HTTP connection to be closed and prevent it from being reused for subsequent operations.
So is it a bug? is it something else?
I am an oldtimer, but fairly new to Spring and friends (and maybe I am getting old and rusty; don't be hard on me). I have a question which is very, very similar to the one found in "Unable to identify bean named 'entityManagerFactory' using the Autowired annotation on a Repository", and it currently beats me... I am unable to find an answer as to why the Spring Boot application (which is a silly little thing I do in my spare time) doesn't seem to be able to autowire JPA repositories as it should. I have (to the best of my knowledge) followed the instructions in the linked issue above (and removed any known direct Hibernate dependencies). I am enclosing my pom file in this public question as well.
<?xml version="1.0" encoding="UTF-8"?>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.3.2.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>se.pointsofinterest</groupId>
<artifactId>poi-restlayer</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>restlayer</name>
<description>This module contains the rest layer for the application</description>
<properties>
<java.version>11</java.version>
</properties>
<dependencies>
<!--dependency>
<groupId>se.pointsofinterest</groupId>
<artifactId>dblayer</artifactId>
<version>1.0.0</version>
</dependency-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
<!-- FIX(review): the explicit <version>2.3.1.RELEASE</version> that was
     here conflicted with the 2.3.2.RELEASE parent, producing a mixed set
     of Spring jars - a common cause of the "bean named
     'entityManagerFactory' could not be found" failure. Let the Boot
     parent manage the version. -->
</dependency>
<!--dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId>
</dependency-->
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
<version>1.4.200</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<scope>runtime</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<!-- NOTE(review): the vintage engine is excluded here, yet WebLayerTest
     is a JUnit 4 test (org.junit.Test / @RunWith) and junit:junit 4.13 is
     declared below. Without the vintage engine, JUnit 4 tests do not run
     on the JUnit Platform - confirm which JUnit generation is intended
     and drop either this exclusion or the junit 4 dependency. -->
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Possibly suspect depedencies below! -->
<dependency>
<groupId>org.springframework.restdocs</groupId>
<artifactId>spring-restdocs-mockmvc</artifactId>
<version>2.0.4.RELEASE</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
<version>4.13</version>
</dependency>
</dependencies>
<repositories>
<!-- Main maven repository -->
<repository>
<id>central</id>
<url>https://repo.maven.apache.org/maven2</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<!-- Repository where to store our local artifacts (an azure artifacts)! -->
<repository>
<id>joakimhansson</id>
<url>https://pkgs.dev.azure.com/joakimhansson/_packaging/joakimhansson/maven/v1</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
As far as I know I have no dependencies on hibernate in this pom.
I have a database layer which contains;
domain (all the entities)
repository (all relevant repository references)
Service which contains a service layer (which defines somewhat higher business logic for the data handling).
This is, to the best of my abilities to see very analogue to the issue referenced above.
The error i get is;
***************************
APPLICATION FAILED TO START
***************************
Description:
Field mapLocationRepository in se.poi.restlayer.dblayer.services.MapLocationService required a bean named 'entityManagerFactory' that could not be found.
The injection point has the following annotations:
- #org.springframework.beans.factory.annotation.Autowired(required=true)
I..e the autowire function does not work.
My application configuration;
package se.poi.restlayer;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import se.poi.dblayer.domain.*;
import se.poi.dblayer.repositories.AddressRepository;
import se.poi.dblayer.repositories.LinksRepository;
import se.poi.dblayer.repositories.MapLocationRepository;
import se.poi.dblayer.repositories.TagDefinitionsRepository;
/**
* * #author Joakim Hansson, 2020
*
*/
#Slf4j
#SpringBootApplication(scanBasePackages = {"se.poi.dblayer.repositories", "se.poi.dblayer.services"})
/*#Import(value={ConfigurationDbLayer.class})
#ComponentScan(basePackages={
"se.poi.dblayer",
"se.poi.dblayer.domain",
"se.poi.dblayer.repositories",
"se.poi.dblayer.services"})
*/
#EntityScan(basePackageClasses = {
Address.class,
Links.class,
MapLocation.class,
MapLocationTagDefinitionsRelation.class,
TagDefinitions.class
})
#EnableJpaRepositories(basePackageClasses = {
AddressRepository.class,
LinksRepository.class,
MapLocationRepository.class,
TagDefinitionsRepository.class})
#ComponentScan(basePackages={
"se.poi.restlayer.dblayer",
"se.poi.restlayer.dblayer.domain",
"se.poi.restlayer.dblayer.repositories",
"se.poi.restlayer.dblayer.services"})
public class Application {
public static void main (String[] args) {
log.info("Starting the main backend for the end customer for us.");
log.info("------------------------------------------------------");
//new BeanConfigurator();
//AutowireCapableBeanFactory f = context.getContext().getAutowireCapableBeanFactory();
//f.autowireBean(new AddressRepository());
SpringApplication.run(Application.class, args);
}
}
The repository;
package se.poi.restlayer.dblayer.repositories;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Repository;
import se.poi.dblayer.domain.Address;
#Repository
//#Component
public interface AddressRepository extends JpaRepository<Address, Long>{
}
The Service;
package se.poi.restlayer.dblayer.services;
import java.util.List;
import java.util.Optional;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Repository;
import lombok.extern.slf4j.Slf4j;
import se.poi.dblayer.domain.Address;
import se.poi.dblayer.domain.Links;
import se.poi.dblayer.domain.MapLocation;
import se.poi.dblayer.domain.TagDefinitions;
import se.poi.dblayer.repositories.AddressRepository;
import se.poi.dblayer.repositories.LinksRepository;
import se.poi.dblayer.repositories.MapLocationRepository;
import se.poi.dblayer.repositories.TagDefinitionsRepository;
//import org.springframework.web.context.annotation.ApplicationScope;
/**
* Demo backend that accepts up to 100 fishing spots. Data is shared with all
* users.
*/
#Slf4j
//#Service
#Component
public class MapLocationService {
//private List<MapLocation> spots = new ArrayList<MapLocation>();
#Autowired(required = true)
MapLocationRepository mapLocationRepository;
#Autowired(required = true)
TagDefinitionsRepository tagDefinitionsRepository;
#Autowired (required = true)
LinksRepository linksRepository;
#Autowired (required = true)
AddressRepository addressRepository;
public void checkRepositoryStatus () {
log.warn("checkRepositoryStatus");
if (mapLocationRepository == null) {
log.warn("Repository is == NULL!");
} else if (tagDefinitionsRepository == null) {
log.warn("tagDefnitionsRepository == NULL!!");
}
}
public void setMapLocationRepository (MapLocationRepository repository) {
this.mapLocationRepository = repository;
}
public Repository getMapLocationRepository() {
return (Repository) mapLocationRepository;
}
#PostConstruct
private void init() {
log.info("init called!");
}
/**
* Retrieves a list of map locations to the caller. As the
* map location is lazilly loaded the caller needs to instantiate each object
* using #link getManagedMapLocation
*
* #return list of map locations.
*/
#Transactional
public List<MapLocation> getAll() {
log.info("getAll");
//return Collections.unmodifiableList(spots);
return mapLocationRepository.findAll();
}
#Transactional
public MapLocation getManagedMapLocation (MapLocation mapLocation) {
Optional<MapLocation>mapLocationResponse = mapLocationRepository.findById(mapLocation.getId());
mapLocation = mapLocationResponse.get();
mapLocation = getAllLinks(mapLocation);
mapLocation = getAllAddresses(mapLocation);
mapLocation = getAllTags(mapLocation);
return mapLocation;
}
#Transactional
public MapLocation getAllAddresses (MapLocation mapLocation) {
log.info("getAllAddresses called!");
mapLocation.getAddresses();
log.info("Retrieved (" + mapLocation.getAddresses().size() + ") objects in list!");
return mapLocation;
}
#Transactional
public MapLocation getAllLinks (MapLocation mapLocation) {
log.info("getAllLinks called!");
mapLocation.getLinks();
log.info("Retrieved (" + mapLocation.getLinks().size() + ") objects in list!");
return mapLocation;
}
#Transactional
public MapLocation getAllTags (MapLocation mapLocation) {
mapLocation.getTagDefinitions();
return mapLocation;
}
/**
* The spot object is a non managed object as returned by this service from
* the getAllFunction.
*
* #param spot
*/
#Transactional
public MapLocation addSpot(MapLocation spot) {
log.info("addSpot called!");
MapLocation mapLocation = mapLocationRepository.save(spot);
for (Links i : spot.getLinks()) {
log.info("links: " + i.getLink() + " id = " + i.getId());
i.setMaplocation(mapLocation);
linksRepository.save(i);
}
for (Address i : spot.getAddresses()) {
log.info("Address: " + i.getAddressline1() + " id = " + i.getId());
i.setMaplocation(mapLocation);
addressRepository.save(i);
}
for (TagDefinitions i : spot.getTagDefinitions()) {log.info("Tagdefinition: " + i.getTag());}
return mapLocation;
}
#Transactional
public void delete (MapLocation mapLocation) {
/* Implementaion */
log.info("delete on maplocation is called!");
for (Links i: mapLocation.getLinks()) {
log.info("Removing link (" + i.getId() + ")");
linksRepository.delete(i);
}
for(Address i : mapLocation.getAddresses()) {
log.info("Deleting address (" + i.getId() + ")");
addressRepository.delete(i);
}
log.info ("remove mapLocation.getId (" + mapLocation.getId() + ")");
mapLocationRepository.delete(mapLocation);
/* * * */
}
/**
*
* #param name Marker name, which should be used on the map.
* #param links the links associated with the marker
* #param address the address to the nearest street address
* #param latitude
* #param longitude
* #param tags the list of tag (in string form) for the marker.
* #return
*/
public MapLocation prepareSpot (Long id,
String name,
List<Links> links,
List<Address> addresses,
double latitude,
double longitude,
List<TagDefinitions> tagDefinitions) {
/* Implementation */
MapLocation mapLocation = new MapLocation();
mapLocation.setId (id);
mapLocation.setName (name);
mapLocation.setLinks (links);
mapLocation.setAddresses (addresses);
mapLocation.setLatitude (latitude);
mapLocation.setLongitude (longitude);
mapLocation.setTagDefinitions (tagDefinitions);
mapLocation.setAddresses (addresses);
mapLocation.setLinks (links);
/* * * */
return mapLocation;
}
}
And a suitable domain entity;
package se.poi.restlayer.dblayer.domain;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinTable;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* Simple data object representing a marker on a map.
*/
#Entity
#Data
#Table(name="MAP_LOCATION")
#AllArgsConstructor
#NoArgsConstructor
public class MapLocation implements Serializable {
/**
*
*/
private static final long serialVersionUID = -590067472197846904L;
#Id
#GeneratedValue(strategy=GenerationType.AUTO)
private Long id;
#Column(name="latitude", nullable = false)
private Double latitude;
#Column(name="longitude", nullable = false)
private Double longitude;
#Column(name="name", length = 128)
private String name;
#ManyToMany (fetch=FetchType.EAGER) //(mappedBy = "mapLocations")
#JoinTable(name="maplocations_tagdefinitions",
joinColumns= #JoinColumn(name="mapLocations"),
inverseJoinColumns = #JoinColumn(name="tagDefinitions"))
private List<TagDefinitions>tagDefinitions = new ArrayList<>();
#OneToMany(mappedBy="maplocation")
private List<Links> links;
#OneToMany(mappedBy="maplocation")
private List<Address> addresses;
public MapLocation(double latitude, double longitude, String name, List<TagDefinitions>tagDefinitions) {
this.latitude = latitude;
this.longitude = longitude;
this.name = name;
this.tagDefinitions = tagDefinitions;
/* * * */
}
}
The test that sets it off;
package se.poi.restlayer;
import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.document;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.junit.Test;
//import org.junit.jupiter.api.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.restdocs.AutoConfigureRestDocs;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import lombok.extern.slf4j.Slf4j;
import se.poi.restlayer.controller.GetTagsController;
/**
*
* #author fmanh
*/
#RunWith(SpringRunner.class)
#WebMvcTest(GetTagsController.class)
#AutoConfigureRestDocs(outputDir="target/snippets")
#Slf4j
public class WebLayerTest {
#Autowired
private MockMvc mockMvc;
#Test
public void testGetTags() throws Exception {
log.info ("mockMvc == " + ((this.mockMvc==null)?"NULL":"INSTANTIATED"));
log.info("KALLE");
this.mockMvc.perform (get("/retrievealltags")); //.
//andExpect(status().isOk()).
//andExpect(content().json("")).
//andDo(print()).
//andDo(document("/retrievealltags"));
}
}
The application.properties
server.port=8080
# Ensure application is run in Vaadin 14/npm mode
vaadin.compatibilityMode = false
logging.level.org.atmosphere = warn
#
# Settings for the internal H2
#
# NOTE(review): the commented-out key three lines below is misspelled
# ('spring.jpa.databse-platform'); the correct key is
# 'spring.jpa.database-platform' - fix before re-enabling the H2 profile.
#spring.datasource.url = jdbc:h2:file:~/test
#spring.datasource.driverClassName = org.h2.Driver
#spring.datasource.username = sa
##spring.datasource.password =
#spring.jpa.databse-platform = org.hibernate.dialect.H2Dialect
#spring.h2.console.enabled = true
#spring.h2.console.path = /h2-console
#hibernate.dialect = H2
#
# Set up the postgres database
#
# WARNING(review): plaintext database credentials are committed in this file
# (and were shared publicly); rotate the password and move credentials to
# environment variables or an externalized secret store.
spring.datasource.url = jdbc:postgresql://localhost:5432/postgres
spring.datasource.username = postgres
spring.datasource.password = d1d4a5baa55f4f70a90e12bc95473833
spring.jpa.database-platform = org.hibernate.dialect.PostgreSQL94Dialect
spring.jpa.show-sql = true
spring.jpa.hibernate.ddl-auto = update
spring.jpa.hibernate.naming.implicit-strategy = org.hibernate.boot.model.naming.ImplicitNamingStrategyJpaCompliantImpl
spring.jpa.properties.hibernate.format_sql = true
spring.jpa.properties.hibernate.generate_statistics = true
logging.level.org.hibernate.type=trace
logging.level.org.hibernate.stat=debug
#spring.jpa.hibernate.ddl-auto=none
# Following values available;
# validate, update, create, create-drop, none
#server.port = 8443
#server.ssl.key-store-type = PKCS12
#server.ssl.key-store = classpath:keystore.p12
#server.ssl.key-store-password = Pur3Life
#server.ssl.key-alias = tomcat
#security.require-ssl = true
Any help is appreciated! I have tried in vain to google it (maybe my google fu is not up to par, or I have missed something obvious); if so, feel free to point it out. A tar archive of the software can be obtained if you wish (there are no secrets here). Please help me grow a bit in wisdom here!
EDIT!
I realised that my description is not complete: my solution contains a rest layer which contains the following controller;
package se.poi.restlayer.controller;
import java.util.ArrayList;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Import;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import se.poi.dblayer.ConfigurationDbLayer;
import se.poi.dblayer.domain.Address;
import se.poi.dblayer.domain.Links;
import se.poi.dblayer.domain.MapLocation;
import se.poi.dblayer.domain.TagDefinitions;
import se.poi.dblayer.repositories.AddressRepository;
import se.poi.dblayer.services.MapLocationService;
import se.poi.dblayer.services.TagDefinitionsService;
import se.poi.restlayer.model.AddressObject;
import se.poi.restlayer.model.LinkObject;
import se.poi.restlayer.model.MapLocationList;
import se.poi.restlayer.model.MapLocationObject;
import se.poi.restlayer.model.TagDefinitionObject;
import se.poi.restlayer.model.TagDefinitionsList;
/**
* The
* #author Joakim Hansson
*/
#RestController
//#EnableJpaRepositories(basePackages={"se.poi.dblayer.repositories"})
//#EntityScan(basePackages={"se.poi.dblayer.domain"})
//#ComponentScan(basePackages={"se.poi.dblayer.services", "se.poi.dblayer.repositories"})
#Slf4j
//#Import(value={ConfigurationDbLayer.class})
public class GetTagsController {
//#Autowired
//AddressRepository a;
#Autowired
TagDefinitionsService tagDefinitionService;
//#Autowired
MapLocationService mapLocationService;
//private TagDefinitionsService tagDefinitionService = new TagDefinitionsService ();
//private MapLocationService mapLocationService = new MapLocationService ();
#GetMapping("/retrievealltags")
public TagDefinitionsList retrieveAllTags () {
/* Implementation */
if (tagDefinitionService==null) {log.error ("tagDefinitionsService: NULL!");}
List<TagDefinitions> list = tagDefinitionService.getAllTagDefinitionsInFull();
TagDefinitionsList tagDefinitionsList = new TagDefinitionsList();
ArrayList<TagDefinitionObject> tagDefinitions = new ArrayList<TagDefinitionObject>();
for (TagDefinitions item : list) {
TagDefinitionObject tagDefinition = new TagDefinitionObject ();
tagDefinition.setId (item.getId());
tagDefinition.setDescription (item.getDescription());
tagDefinition.setTag (item.getTag());
tagDefinition.setParentId (null);
tagDefinitions.add (tagDefinition);
}
tagDefinitionsList.setTagDefinitions(tagDefinitions);
/* * * */
return tagDefinitionsList;
}
#GetMapping("/retrieveMarkers")
public MapLocationList retrieveMarkers () {
/* Implementation */
// Retrieve all the data from the service...
List<MapLocation> l = mapLocationService.getAll();
// Convert to...
MapLocationList mapLocationList = new MapLocationList ();
ArrayList<MapLocationObject> ll = new ArrayList<MapLocationObject> ();
for (MapLocation item: l) {
MapLocationObject mapLocationObject = new MapLocationObject ();
mapLocationObject.setId (item.getId ());
mapLocationObject.setLatitude (item.getLatitude ());
mapLocationObject.setLongitude (item.getLongitude ());
mapLocationObject.setName (item.getName ());
mapLocationObject.setLinks (copyLinksList (item.getLinks ()));
mapLocationObject.setAddresses (copyAddressList (item.getAddresses ()));
ll.add (mapLocationObject);
}
/* * * */
return mapLocationList;
}
/* Private functions
* **********************************************************************/
/**
* Copies data from the database model to the rest API model.
*
* #param links
* #return
*/
private List<LinkObject> copyLinksList (List<Links>links) {
/* Implementation */
ArrayList<LinkObject> ll = new ArrayList<LinkObject> ();
for (Links item: links) {
LinkObject linkObject = new LinkObject();
linkObject.setId (item.getId());
linkObject.setLink (item.getLink());
ll.add(linkObject);
}
/* * * */
return ll;
}
/**
*
* #param address
* #return
*/
private List<AddressObject> copyAddressList (List<Address>address) {
/* Implementation */
ArrayList<AddressObject> ll = new ArrayList<AddressObject> ();
for (Address item: address) {
AddressObject addressObject = new AddressObject();
addressObject.setId (item.getId ());
addressObject.setAddressline1 (item.getAddressline1 ());
addressObject.setAddressline2 (item.getAddressline2 ());
addressObject.setAddressline3 (item.getAddressline3 ());
addressObject.setCity (item.getCity ());
addressObject.setPostcode (item.getPostcode ());
ll.add(addressObject);
}
/* * * */
return ll;
}
}
This means that the application tries to instantiate a controller which contains an autowire annotation on a dblayer service, and this dblayer service in turn contains an autowire annotation on a repository. It is this chain which goes south. Sorry for the omission.
EDIT;
I am now experimenting wildly; autowiring simply doesn't work — sigh! The funny part is that I can see in my log files that 4 JPA entities are found, but I simply can't get @Autowired to work with those repositories. Google seems to indicate that this is a common problem, but there doesn't seem to be a clear-cut solution.
You are using different versions for Spring Boot and JPA (that is wrong — always let Spring Boot manage the versions of all Spring Boot starters). Remove this line
<version>2.3.1.RELEASE</version>
from JPA dependency so that Spring Boot pulls the correct version from parent and let me know if the problem is still there.
I am developing an asynchronous mail server in spring boot using kafka.
I have written tests with embedded kafka which starts its own kafka topic in a random port and use it for testing.
When I start the test, the application context loads and expects a Kafka cluster on my local machine. I need to stop the application context from loading.
I replicated the code from https://github.com/code-not-found/spring-kafka/blob/master/spring-kafka-unit-test-classrule/src/test/java/com/codenotfound/kafka/producer/SpringKafkaSenderTest.java, which works absolutely fine. When I followed the same style in my project, I can see the actual application starting.
SpringKafkaSenderTest .java
package com.mailer.embeddedkafkatests;
import static org.junit.Assert.assertTrue;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;
import com.mailer.model.Mail;
import com.mailer.producer.KafkaMessageProducer;
import com.mailer.serializer.MailSerializer;
#RunWith(SpringRunner.class)
#SpringBootTest
#DirtiesContext
public class SpringKafkaSenderTest {
private static final Logger LOGGER =
LoggerFactory.getLogger(SpringKafkaSenderTest.class);
private static String SENDER_TOPIC = "sender.t";
#Autowired
private KafkaMessageProducer sender;
private KafkaMessageListenerContainer<String, Mail> container;
private BlockingQueue<ConsumerRecord<String, Mail>> records;
#ClassRule
public static EmbeddedKafkaRule embeddedKafka =
new EmbeddedKafkaRule(1, true, SENDER_TOPIC);
#Before
public void setUp() throws Exception {
// set up the Kafka consumer properties
Map<String, Object> consumerProperties =
KafkaTestUtils.consumerProps("sender", "false",
embeddedKafka.getEmbeddedKafka());
consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, MailSerializer.class);
// create a Kafka consumer factory
DefaultKafkaConsumerFactory<String, Mail> consumerFactory =
new DefaultKafkaConsumerFactory<String, Mail>(
consumerProperties);//, new StringDeserializer(), new JsonDeserializer<>(Mail.class));
// set the topic that needs to be consumed
ContainerProperties containerProperties =
new ContainerProperties(SENDER_TOPIC);
// create a Kafka MessageListenerContainer
container = new KafkaMessageListenerContainer<>(consumerFactory,
containerProperties);
// create a thread safe queue to store the received message
records = new LinkedBlockingQueue<>();
// setup a Kafka message listener
container
.setupMessageListener(new MessageListener<String, Mail>() {
#Override
public void onMessage(
ConsumerRecord<String, Mail> record) {
LOGGER.debug("test-listener received message='{}'",
record.toString());
records.add(record);
}
});
// start the container and underlying message listener
container.start();
// wait until the container has the required number of assigned partitions
ContainerTestUtils.waitForAssignment(container,
embeddedKafka.getEmbeddedKafka().getPartitionsPerTopic());
}
#After
public void tearDown() {
// stop the container
container.stop();
}
#Test
public void testSend() throws InterruptedException {
// send the message
Mail mail = new Mail();
mail.setFrom("vinoth#local.com");
sender.sendMessage(mail);
Thread.sleep(4000);
// check that the message was received
ConsumerRecord<String, Mail> received =
records.poll(10, TimeUnit.SECONDS);
// Hamcrest Matchers to check the value
assertTrue(received.value().getFrom().equals(mail.getFrom()));
System.out.println(received.value().getFrom());
// assertThat(received, hasValue(mail));
// AssertJ Condition to check the key
// assertThat(received).has(key(null));
}
}
Why would you like to stop the spring context from loading? Isn't the purpose of this junit to test your spring application?
In any case, just remove the @SpringBootTest annotation and the Spring context will not load.
I see this is a tough question for Play. A lot of people asked that question, but still it is not clear how to get bytes from request body if content type is not set in Java.
There is a solution in Scala, but it is not working for my case.
I want to use Play to built a http mock server in a test in Java.
@BodyParser.Of(BodyParser.Raw.class) has no effect
package org.dan;
import org.junit.Test;
import play.mvc.BodyParser;
import play.mvc.Result;
import play.server.Server;
//import static play.mvc.Controller.request;
import static play.mvc.Results.ok;
import static play.routing.RoutingDsl.fromComponents;
import static play.server.Server.forRouter;
import static play.mvc.Http.Context.Implicit.request;
public class DemoPlayTest {
#Test
public void run() throws InterruptedException {
Server server = forRouter(
9001,
(components) ->
fromComponents(components)
.POST("/echo")
.routeTo(DemoPlayTest::action)
.build());
Thread.sleep(1111111);
}
#BodyParser.Of(BodyParser.Raw.class)
public static Result action() {
return ok("Gut: " + request().body().asRaw() + "\n");
}
}
Testing:
$ curl -v -X POST -d hello http://localhost:9001/echo
Gut: null
Dependencies:
<play.version>2.6.17</play.version>
<dependency>
<groupId>com.typesafe.play</groupId>
<artifactId>play-server_2.11</artifactId>
<version>${play.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.typesafe.play</groupId>
<artifactId>play-akka-http-server_2.11</artifactId>
<version>${play.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.typesafe.play</groupId>
<artifactId>play-java_2.11</artifactId>
<version>${play.version}</version>
<scope>test</scope>
</dependency>
package org.dan;
import org.junit.Test;
import play.mvc.Result;
import play.server.Server;
import static play.mvc.Controller.request;
import static play.mvc.Results.ok;
import static play.routing.RoutingDsl.fromComponents;
import static play.server.Server.forRouter;
public class DemoPlayTest {
#Test
public void run() throws InterruptedException {
Server server = forRouter(
9001,
(components) ->
fromComponents(components)
.POST("/echo")
.routeTo(DemoPlayTest::action)
.build());
Thread.sleep(1111111);
}
public static Result action() {
final String body = new String(request().body().asBytes().toArray());
return ok("Gut: " + body + "\n");
}
}
I am getting following Error:
Exception in thread "main" java.lang.AbstractMethodError
at org.apache.spark.internal.Logging$class.initializeLogIfNecessary(Logging.scala:99)
at org.apache.spark.streaming.kafka010.KafkaUtils$.initializeLogIfNecessary(KafkaUtils.scala:40)
at org.apache.spark.internal.Logging$class.log(Logging.scala:46)
at org.apache.spark.streaming.kafka010.KafkaUtils$.log(KafkaUtils.scala:40)
at org.apache.spark.internal.Logging$class.logWarning(Logging.scala:66)
at org.apache.spark.streaming.kafka010.KafkaUtils$.logWarning(KafkaUtils.scala:40)
at org.apache.spark.streaming.kafka010.KafkaUtils$.fixKafkaParams(KafkaUtils.scala:157)
at org.apache.spark.streaming.kafka010.DirectKafkaInputDStream.<init>(DirectKafkaInputDStream.scala:65)
at org.apache.spark.streaming.kafka010.KafkaUtils$.createDirectStream(KafkaUtils.scala:126)
at org.apache.spark.streaming.kafka010.KafkaUtils$.createDirectStream(KafkaUtils.scala:149)
at org.apache.spark.streaming.kafka010.KafkaUtils.createDirectStream(KafkaUtils.scala)
at com.spark.kafka.JavaDirectKafkaWordCount.main(JavaDirectKafkaWordCount.java:50)
18/05/29 18:05:43 INFO SparkContext: Invoking stop() from shutdown hook
18/05/29 18:05:43 INFO SparkUI: Stopped Spark web UI at
Code snippet:
I am writing a simple kafka - spark streaming code in eclipse to consume the messages from kafka broker using spark streaming. Below is the code
I am trying to execute the code; it works fine up to the JavaInputDStream, after which I get the error above.
Every import is fine. Can someone help with this?
package com.spark.kafka;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.*;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;

import scala.Tuple2;
/**
 * Consumes messages from a Kafka topic via Spark Streaming's direct stream
 * and prints a word count for every two-second micro-batch.
 *
 * NOTE(review): the AbstractMethodError in the stack trace comes from the
 * mismatched Spark/Scala versions in the pom (spark-streaming_2.11:2.3.0
 * vs spark-streaming-kafka-0-10_2.10:2.0.0) — align those artifacts.
 */
public final class JavaDirectKafkaWordCount {

    private static final Pattern SPACE = Pattern.compile(" ");

    /**
     * Entry point: wires Kafka into a streaming word count and blocks forever.
     *
     * @param args unused
     * @throws Exception if the streaming context is interrupted
     */
    public static void main(String[] args) throws Exception {
        String brokers = "localhost:9092";
        String topics = "sparktestone";

        SparkConf sparkConf = new SparkConf()
                .setAppName("JavaDirectKafkaWordCount")
                .setMaster("local[*]");
        JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, Durations.seconds(2));

        Set<String> topicsSet = new HashSet<>(Arrays.asList(topics.split(",")));

        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", brokers);
        // Bug fix: the Kafka consumer cannot be created without key/value
        // deserializers and a consumer group — these settings were missing.
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "spark-word-count");

        JavaInputDStream<ConsumerRecord<String, String>> messages =
                KafkaUtils.createDirectStream(
                        jssc,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.Subscribe(topicsSet, kafkaParams));

        // Split each record's value into words and count occurrences per batch.
        JavaDStream<String> lines = messages.map(ConsumerRecord::value);
        JavaDStream<String> words =
                lines.flatMap(x -> Arrays.asList(SPACE.split(x)).iterator());
        JavaPairDStream<String, Integer> wordCounts = words
                .mapToPair(s -> new Tuple2<>(s, 1))
                .reduceByKey((i1, i2) -> i1 + i2);
        wordCounts.print();

        jssc.start();
        jssc.awaitTermination();
    }
}
In Pom i have added respective dependencies.
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>2.3.0</version>
<scope>provided</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming-kafka-0-10 -->
<dependency>
<groupId>org.apache.spark</groupId>
<!-- NOTE(review): the Scala suffix (_2.10) and version (2.0.0) here did not
     match spark-streaming_2.11:2.3.0 above; that mismatch is what causes the
     AbstractMethodError at runtime. Aligned to _2.11 and the same version. -->
<artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
<version>2.3.0</version>
</dependency>
I just have the same issue, in Spark 2.3 that method is abstract.
Conclusion, use Spark 2.2.
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>2.2.0</version>
<scope>provided</scope>
</dependency>