Apache Kafka: Configuring `ListSerde` for `TopologyTestDriver` - java

I am writing a unit test for a custom suppress processor. The class is called SuppressProcessor. The unit test looks like:
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig
import no.statnett.observations.grid.flow.CorridorFlow.avro.v1.CorridorFlow
import my.stream.SuppressProcessor
import my.stream.TopicSerdeConfig
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.streams.StreamsConfig
import org.apache.kafka.streams.Topology
import org.apache.kafka.streams.TopologyTestDriver
import org.apache.kafka.streams.processor.api.ProcessorSupplier
import org.apache.kafka.streams.state.Stores
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.TestInstance
import testUtils.RecordFactory
import java.time.Duration
import java.util.Properties
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class SuppressProcessorTests {
// Following https://www.ru-rocker.com/2020/12/07/how-to-unit-test-kafka-streams-application-part-2-processor-api/
private lateinit var topology: Topology
private lateinit var testDriver: TopologyTestDriver
private val schemaRegistryScope = SuppressProcessorTests::class.java.name
private val schemaRegistryUrl = "mock://$schemaRegistryScope"
private val inputTopicName = "input_topic"
private val outputTopicName = "output_topic"
private val topicSerde = TopicSerdeConfig(inputTopicName, outputTopicName, schemaRegistryUrl)
private val sourceName = "source"
private val processorName = "suppress-processor"
private val stateStoreName = "suppress-processor-store"
private val recordFactory = RecordFactory()
@BeforeEach
fun setup() {
val config = Properties()
config[StreamsConfig.APPLICATION_ID_CONFIG] = "suppress-processor-test"
config[StreamsConfig.BOOTSTRAP_SERVERS_CONFIG] = ""
config[StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG] = topicSerde.stringKeySerde::class.java
config[StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG] = "org.apache.kafka.common.serialization.Serdes.ListSerde"
config[CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_INNER_CLASS] = topicSerde.inputValueSerde.javaClass
config[AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG] = schemaRegistryUrl
val storeBuilder = Stores
.timestampedKeyValueStoreBuilder(
Stores.inMemoryKeyValueStore(stateStoreName),
topicSerde.stringKeySerde, topicSerde.corridorFlowListSerde
)
.withLoggingDisabled()
topology = Topology()
topology.addSource(sourceName, topicSerde.stringKeySerde.deserializer(), topicSerde.corridorFlowListSerde.deserializer(), inputTopicName)
topology.addProcessor(processorName, ProcessorSupplier { SuppressProcessor(stateStoreName) }, sourceName)
topology.addStateStore(storeBuilder, processorName)
topology.addSink("sink", outputTopicName, processorName)
testDriver = TopologyTestDriver(topology, config)
}
@Test
fun `Should schedule as expected`(){
val inputTopic = testDriver.createInputTopic(
inputTopicName,
topicSerde.stringKeySerde.serializer(),
topicSerde.corridorFlowListSerde.serializer()
)
val outputTopic = testDriver.createOutputTopic(
outputTopicName,
topicSerde.stringKeySerde.deserializer(),
topicSerde.corridorFlowListSerde.deserializer()
)
val corridorFlow = recordFactory.generateCorridorFlow()
val corridorFlows = listOf<CorridorFlow>(corridorFlow, corridorFlow)
inputTopic.pipeInput(corridorFlow.mrid, corridorFlows)
assertThat(outputTopic.isEmpty).isTrue
testDriver.advanceWallClockTime(Duration.ofSeconds(3))
assertThat(outputTopic.isEmpty).isFalse
}
}
The SuppressProcessor looks like:
import net.logstash.logback.marker.Markers
import my.App
import my.avro.CorridorFlow
import org.apache.kafka.streams.processor.PunctuationType
import org.apache.kafka.streams.processor.api.ContextualProcessor
import org.apache.kafka.streams.processor.api.ProcessorContext
import org.apache.kafka.streams.processor.api.Record
import org.apache.kafka.streams.state.TimestampedKeyValueStore
import org.apache.kafka.streams.state.ValueAndTimestamp
import java.time.Duration
import java.time.Instant
class SuppressProcessor(private val stateStoreName: String) :
ContextualProcessor<String, MutableList<CorridorFlow>, String, MutableList<CorridorFlow>>() {
private lateinit var store: TimestampedKeyValueStore<String, MutableList<CorridorFlow>>
override fun init(context: ProcessorContext<String, MutableList<CorridorFlow>>?) {
super.init(context)
this.store = context().getStateStore(stateStoreName)
context().schedule(Duration.ofSeconds(1), PunctuationType.WALL_CLOCK_TIME) { schedule() }
}
override fun process(record: Record<String, MutableList<CorridorFlow>>) {
App.log.debug(
Markers.append("SuppressedValue", record.value()),
"Suppress received key=${record.key()} receivedAt=${context().currentSystemTimeMs()}"
)
store.put(record.key(), ValueAndTimestamp.make(record.value(), getExpirationTimestamp()))
}
private fun getExpirationTimestamp(timestamp: Long = context().currentSystemTimeMs()): Long =
timestamp + Duration.ofSeconds(2).toMillis()
private fun schedule(currentTimestamp: Long = context().currentSystemTimeMs()) {
val expiredKeys = mutableListOf<String>()
val iterator = store.all()
iterator.forEach { aggregate ->
App.log.info(aggregate.toString())
App.log.info("currentTimestamp: $currentTimestamp")
App.log.info("aggregateTimestamp: ${aggregate.value.timestamp()}")
if (aggregate.value.timestamp() >= currentTimestamp) return@forEach
App.log.info(aggregate.toString())
App.log.debug(
Markers.append("Suppress", aggregate.value),
"Suppress release key=${aggregate.key} releasedAt=${currentTimestamp}"
)
val recordTimestamp = Instant.parse(aggregate.value.value().first().timeInterval.end).toEpochMilli()
context().forward(Record(aggregate.key, aggregate.value.value(), recordTimestamp))
expiredKeys.add(aggregate.key)
}
iterator.close()
context().commit()
expiredKeys.forEach { store.delete(it) }
}
}
And the Serde configuration file looks like:
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import my.avro.BiddingZoneFlow;
import my.avro.CorridorFlow;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class TopicSerdeConfig {
public final String inputTopic;
public final String outputTopic;
public final String schemaRegistryUrl;
public final Serde<String> stringKeySerde;
public final SpecificAvroSerde<CorridorFlow> inputValueSerde;
public final Serde<List<CorridorFlow>> corridorFlowListSerde;
public final SpecificAvroSerde<BiddingZoneFlow> outputValueSerde;
public TopicSerdeConfig(String inputTopic, String outputTopic, String schemaRegistryUrl) {
this.inputTopic = inputTopic;
this.outputTopic = outputTopic;
this.schemaRegistryUrl = schemaRegistryUrl;
var serdeConfig = Collections.singletonMap(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
// Configure input serdes
stringKeySerde = Serdes.String();
stringKeySerde.configure(serdeConfig, true);
inputValueSerde = new SpecificAvroSerde<>();
inputValueSerde.configure(serdeConfig, false);
// Configure aggregation serdes
corridorFlowListSerde = Serdes.ListSerde(ArrayList.class, inputValueSerde);
// Configure output serdes
outputValueSerde = new SpecificAvroSerde<>();
outputValueSerde.configure(serdeConfig, false);
}
}
Note that this file is written in Java as opposed to Kotlin which is used in the other files.
When running the unit test, the following error message occurs:
org.apache.kafka.common.config.ConfigException: Invalid value org.apache.kafka.common.serialization.Serdes.ListSerde for configuration default.value.serde: Class org.apache.kafka.common.serialization.Serdes.ListSerde could not be found.
at org.apache.kafka.common.config.ConfigDef.parseType(ConfigDef.java:744)
at org.apache.kafka.common.config.ConfigDef.parseValue(ConfigDef.java:490)
at org.apache.kafka.common.config.ConfigDef.parse(ConfigDef.java:483)
at org.apache.kafka.common.config.AbstractConfig.<init>(AbstractConfig.java:113)
at org.apache.kafka.common.config.AbstractConfig.<init>(AbstractConfig.java:146)
at org.apache.kafka.streams.StreamsConfig.<init>(StreamsConfig.java:1235)
at org.apache.kafka.streams.processor.internals.ClientUtils$QuietStreamsConfig.<init>(ClientUtils.java:55)
at org.apache.kafka.streams.TopologyTestDriver.<init>(TopologyTestDriver.java:320)
at org.apache.kafka.streams.TopologyTestDriver.<init>(TopologyTestDriver.java:299)
at org.apache.kafka.streams.TopologyTestDriver.<init>(TopologyTestDriver.java:275)
at my.SuppressProcessorTests.setup(SuppressProcessorTests.kt:66)
Any idea on how to configure ListSerde for unit tests using TopologyTestDriver?
EDIT: Added pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>parent-poms</groupId>
<artifactId>parent</artifactId>
<version>7.0.1</version>
<relativePath/> <!-- http://maven.apache.org/ref/3.0.3/maven-model/maven.html#class_parent -->
</parent>
<groupId>application</groupId>
<artifactId>flow-application</artifactId>
<version>1.0</version>
<packaging>jar</packaging>
<name>flow-application</name>
<description>flow-application</description>
<properties>
<ktor.version>2.2.1</ktor.version>
<confluent.version>7.3.0</confluent.version>
<kafka.version>3.3.1</kafka.version>
<prometeus.version>1.10.2</prometeus.version>
<avro.version>1.11.1</avro.version>
<observations-avro.version>2.0.12</observations-avro.version>
<logback-classic.version>1.4.5</logback-classic.version>
<logstash-logback-encoder.version>7.2</logstash-logback-encoder.version>
<slf4j.version>2.0.6</slf4j.version>
<!-- Test -->
<junit-jupiter.version>5.9.1</junit-jupiter.version>
<kotlinx-coroutines-debug.version>1.6.4</kotlinx-coroutines-debug.version>
<assertj-core.version>3.23.1</assertj-core.version>
</properties>
<dependencies>
<dependency>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-stdlib</artifactId>
<version>${kotlin.version}</version>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-client</artifactId>
<version>${confluent.version}</version>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>${confluent.version}</version>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-streams-avro-serde</artifactId>
<version>${confluent.version}</version>
</dependency>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-core</artifactId>
<version>${prometeus.version}</version>
</dependency>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-registry-prometheus</artifactId>
<version>${prometeus.version}</version>
</dependency>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>${avro.version}</version>
</dependency>
<dependency>
<groupId>no.statnett.schemas.business</groupId>
<artifactId>observations-avro</artifactId>
<version>${observations-avro.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-server-core-jvm</artifactId>
<version>${ktor.version}</version>
</dependency>
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-server-netty-jvm</artifactId>
<version>${ktor.version}</version>
</dependency>
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-server-metrics-jvm</artifactId>
<version>${ktor.version}</version>
</dependency>
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-server-content-negotiation-jvm</artifactId>
<version>${ktor.version}</version>
</dependency>
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-serialization-gson-jvm</artifactId>
<version>${ktor.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<!-- Logging -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback-classic.version}</version>
</dependency>
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>${logstash-logback-encoder.version}</version>
</dependency>
<!-- Testing -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams-test-utils</artifactId>
<version>${kafka.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-server-tests-jvm</artifactId>
<version>${ktor.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.jetbrains.kotlinx</groupId>
<artifactId>kotlinx-coroutines-debug</artifactId>
<version>${kotlinx-coroutines-debug.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<version>${assertj-core.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

The solution was to change the config of the TopologyTestDriver to the following:
val config = Properties()
config[StreamsConfig.APPLICATION_ID_CONFIG] = "suppress-processor-test"
config[StreamsConfig.BOOTSTRAP_SERVERS_CONFIG] = ""
config[StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG] = topicSerde.stringKeySerde::class.java
config[StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG] = topicSerde.corridorFlowListSerde.javaClass
config[CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_TYPE_CLASS] = topicSerde.inputValueSerde.javaClass
config[CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_INNER_CLASS] = topicSerde.inputValueSerde.javaClass
config[AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG] = schemaRegistryUrl
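For reference, ListSerde is a static inner class of Serdes, so its binary name is org.apache.kafka.common.serialization.Serdes$ListSerde; the dotted string used originally can therefore never be loaded, which is why passing Class objects works. As an illustration only, here is a plain-Java sketch of an equivalent TopologyTestDriver configuration (the class name ListSerdeTestConfig is hypothetical, and it assumes an ArrayList list type with a SpecificAvroSerde inner serde, mirroring TopicSerdeConfig):
import java.util.ArrayList;
import java.util.Properties;
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
public class ListSerdeTestConfig {
    public static Properties testDriverConfig(String schemaRegistryUrl) {
        Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "suppress-processor-test");
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "");
        // Pass the Class object rather than a dotted string; the nested class cannot be found under that name.
        config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.ListSerde.class);
        // The List implementation the default list serde should deserialize into.
        config.put(CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_TYPE_CLASS, ArrayList.class);
        // The serde used for the elements inside the list.
        config.put(CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_INNER_CLASS, SpecificAvroSerde.class);
        config.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
        return config;
    }
}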

Related

MessageBodyWriter not found for media type=application/xml, type=class java.util.ArrayList

I am trying to execute a very simple REST API using JAX-RS. I have been following a JetBrains tutorial and I'm stuck at the point where the API needs to return a Response/XML. The Java version used here is 1.8 and Tomcat is 10.0.
I guess there are some dependencies missing. Can someone help?
Here are my code snippets:
pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.surender</groupId>
<artifactId>messenger</artifactId>
<packaging>war</packaging>
<version>0.0.1-SNAPSHOT</version>
<name>messenger</name>
<build>
<finalName>messenger</finalName>
<plugins>
<plugin>
<artifactId>maven-war-plugin</artifactId>
<version>3.2.2</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<inherited>true</inherited>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.glassfish.jersey</groupId>
<artifactId>jersey-bom</artifactId>
<version>${jersey.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>jakarta.servlet</groupId>
<artifactId>jakarta.servlet-api</artifactId>
<version>5.0.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.activation/activation -->
<dependency>
<groupId>javax.activation</groupId>
<artifactId>activation</artifactId>
<version>1.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jaxb/jaxb-runtime -->
<dependency>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-runtime</artifactId>
<version>2.3.0-b170127.1453</version>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<!-- use the following artifactId if you don't need servlet 2.x compatibility -->
<!-- artifactId>jersey-container-servlet</artifactId -->
</dependency>
<dependency>
<groupId>org.glassfish.jersey.inject</groupId>
<artifactId>jersey-hk2</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-jaxb</artifactId>
<version>3.0.2</version>
</dependency>
<dependency>
<groupId>jakarta.xml.bind</groupId>
<artifactId>jakarta.xml.bind-api</artifactId>
<version>4.0.0</version>
</dependency>
<dependency>
<groupId>com.sun.xml.bind</groupId>
<artifactId>jaxb-impl</artifactId>
<version>4.0.0</version>
<scope>runtime</scope>
</dependency>
<!-- uncomment this to get JSON support <dependency> <groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-binding</artifactId> </dependency> -->
</dependencies>
<properties>
<jersey.version>3.1.0-M2</jersey.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
</project>
Model Class
package org.surender.model;
import javax.xml.bind.annotation.XmlRootElement;
import jakarta.xml.bind.annotation.XmlElement;
@XmlRootElement(name = "Surender")
public class MessagesModel {
@XmlElement(name = "Message ID")
public int messageId;
public String messageTxt;
public String messageLike;
public String messageComment;
public MessagesModel(int messageId, String messageTxt, String messageLike, String messageComment) {
super();
System.out.println("Objects getting created");
this.messageId = messageId;
this.messageTxt = messageTxt;
this.messageLike = messageLike;
this.messageComment = messageComment;
}
public MessagesModel() {
}
public int getMessageId() {
return messageId;
}
public void setMessageId(int messageId) {
this.messageId = messageId;
}
public String getMessageTxt() {
return messageTxt;
}
public void setMessageTxt(String messageTxt) {
this.messageTxt = messageTxt;
}
public String getMessageLike() {
return messageLike;
}
public void setMessageLike(String messageLike) {
this.messageLike = messageLike;
}
public String getMessageComment() {
return messageComment;
}
public void setMessageComment(String messageComment) {
this.messageComment = messageComment;
}
}
Message Service
package org.surender.service;
import java.util.ArrayList;
import java.util.List;
import org.surender.model.MessagesModel;
public class MessagesService {
public List<MessagesModel> getMessages() {
List<MessagesModel> messagesList = new ArrayList<MessagesModel>();
messagesList.add(new MessagesModel(1,"Hey There !","2","Hello"));
messagesList.add(new MessagesModel(1,"Hola Amigo!","2","Hello"));
return messagesList;
}
public MessagesService() {
}
}
MessageResource
package org.surender.messenger;
import java.util.ArrayList;
import java.util.List;
import org.surender.model.MessagesModel;
import org.surender.service.MessagesService;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.GenericEntity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
@Path("messages")
public class Messages {
MessagesService msgService = new MessagesService();
@GET
@Produces(MediaType.APPLICATION_XML)
public Response getMessages() {
System.out.println("Hi... I'm here one");
GenericEntity<List<MessagesModel>> entity = new GenericEntity<List<MessagesModel>>(msgService.getMessages()) {};
return Response.ok(entity).build();
}
}

Warning message: Specifying types in document index requests is deprecated

I implemented an ElasticSearchConsumer class which is supposed to return the document id. The running program displays a warning message and returns just "twitter":
déc. 23, 2020 9:41:10 AM org.elasticsearch.client.RestClient logResponse
WARNING: request [POST https://kafka-course-6054260476.us-east-1.bonsaisearch.net:443/twitter/tweets?timeout=1m] returned 1 warnings: [299 Elasticsearch-7.2.0-508c38a "[types removal] Specifying types in document index requests is deprecated, use the typeless endpoints instead (/{index}/_doc/{id}, /{index}/_doc, or /{index}/_create/{id})."]
[main] INFO com.gihub.simplesteph.kafka.tutorial3.ElasticSearchConsumer - twitter
Process finished with exit code 0
This is the code:
package com.gihub.simplesteph.kafka.tutorial3;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
public class ElasticSearchConsumer {
public static RestHighLevelClient createClient(){
// replace with your own credentials
String hostname = "kafka-course-6054260476.us-east-1.bonsaisearch.net";
String username = "48h3frssnm";
String password = "8iliybmly0";
//don't do if you run a local ES
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password));
RestClientBuilder builder = RestClient.builder(
new HttpHost(hostname, 443, "https"))
.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
@Override
public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpAsyncClientBuilder) {
return httpAsyncClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
}
});
RestHighLevelClient client = new RestHighLevelClient(builder);
return client;
}
public static void main(String[] args) throws IOException {
Logger logger = LoggerFactory.getLogger(ElasticSearchConsumer.class.getName());
RestHighLevelClient client = createClient();
String jsonString = "{ \"foo\": \"bar\" }";
IndexRequest indexRequest = new IndexRequest("twitter", "tweets" ).source(jsonString, XContentType.JSON);
IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
String id = indexResponse.getIndex();
logger.info(id);
//close the client gracefully
client.close();
}
}
This is the pom.xml file:
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>kafka-beginners-course</artifactId>
<groupId>org.example</groupId>
<version>1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>kafka-consumer-elasticsearch</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.elasticsearch.client/elasticsearch-rest-high-level-client -->
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>7.10.1</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-client</artifactId>
<version>7.10.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.0.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-simple -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.25</version>
<!-- <scope>test</scope>-->
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpasyncclient</artifactId>
<version>4.1.4</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore-nio</artifactId>
<version>4.4.14</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.13</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.4.14</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.15</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.2</version>
</dependency>
</dependencies>
</project>
You can use the following likewise; it removes the warning message:
IndexRequest indexRequest = new IndexRequest("twitter", "_doc", tweetId);
The warning is not related to what is returned; it is related to the deprecated constructor of IndexRequest(). You can either ignore it or stop passing the type argument (the second argument) to the constructor.
You retrieved the index name with the getIndex() method, but you should retrieve the id with the getId() method.
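Concretely, a typeless version of the indexing code from the question might look like the sketch below; it reuses createClient() and logger from the original class and assumes the 7.x high-level REST client:
RestHighLevelClient client = createClient();
String jsonString = "{ \"foo\": \"bar\" }";
// Typeless request: only the index name is given, no "tweets" type, so no deprecation warning.
IndexRequest indexRequest = new IndexRequest("twitter").source(jsonString, XContentType.JSON);
IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
// getId() returns the generated document id; getIndex() only returns the index name ("twitter").
String id = indexResponse.getId();
logger.info(id);
client.close();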

Mysql client NoSuchMethodError

All of a sudden I receive this message when I try to deploy my Vertx application:
GRAVE: Unhandled exception java.lang.NoSuchMethodError: 'void io.vertx.sqlclient.impl.SocketConnectionBase.<init>(io.vertx.core.impl.NetSocketInternal, boolean, int, int, int, io.vertx.core.Context)'
at io.vertx.mysqlclient.impl.MySQLSocketConnection.<init>(MySQLSocketConnection.java:46)
at io.vertx.mysqlclient.impl.MySQLConnectionFactory.lambda$connect$0(MySQLConnectionFactory.java:115)
at io.vertx.core.impl.FutureImpl.tryComplete(FutureImpl.java:131)
Here are the dependencies of my pom file:
<dependencies>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-auth-jwt</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-junit5</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-core</artifactId>
<version>4.0.0-milestone3</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-mysql-client</artifactId>
<version>4.0.0-milestone3</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-web</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-junit5</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-web-templ-freemarker</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-service-proxy</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-codegen</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
And this is the code that deploys the HTTP server and the database:
@Override
public void start(Promise<Void> promise) throws Exception {
// Deploy the database
Promise<String> dbVerticleDeployment = Promise.promise();
vertx.deployVerticle(new DatabaseVerticle(), dbVerticleDeployment);
// When the database has been deployed, proceed with the server
dbVerticleDeployment.future().compose(s -> {
// Deploy the HTTPS server
Promise<String> httpVerticleDeployment = Promise.promise();
vertx.deployVerticle(new HttpsServerVerticle(), httpVerticleDeployment);
// Return the future to notify the completion
return httpVerticleDeployment.future();
}).setHandler(stringAsyncResult -> {
// Error handling
if (stringAsyncResult.succeeded())
promise.complete();
else
promise.fail(stringAsyncResult.cause());
});
}
I don't think the error is in the code; it's probably something related to the Maven imports. I have probably messed up the versions?
Here's the database verticle:
package com.rosanna.mkscoreboards.database;
import com.rosanna.mkscoreboards.database.service.DatabaseService;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.Promise;
import io.vertx.mysqlclient.MySQLConnectOptions;
import io.vertx.mysqlclient.MySQLPool;
import io.vertx.serviceproxy.ServiceBinder;
import io.vertx.sqlclient.PoolOptions;
import java.util.HashMap;
public class DatabaseVerticle extends AbstractVerticle {
// Database connection constants
private static final String HOST_NAME = "localhost";
private static final String DATABASE_NAME = "mkscoreboards";
private static final String DB_USERNAME = "root";
private static final String DB_PASSWORD = "temp_pass";
private static final int MAX_POOL_SIZE = 20;
// Queries
private static HashMap<SqlQuery, String> queries = new HashMap<>();
// Event bus identifier
public static final String MKSCOREBOARDS_QUEUE = "mkscoreboards.queue";
@Override
public void start(Promise<Void> promise) throws Exception {
// Connection setup
var connectOptions = new MySQLConnectOptions()
.setPort(3306)
.setHost(HOST_NAME)
.setDatabase(DATABASE_NAME)
.setUser(DB_USERNAME)
.setPassword(DB_PASSWORD);
var poolOptions = new PoolOptions().setMaxSize(MAX_POOL_SIZE);
// Load the queries
initQueries();
// Create the pooled client
var client = MySQLPool.pool(vertx, connectOptions, poolOptions);
DatabaseService.create(client, queries, ready -> {
if (ready.succeeded()) {
var binder = new ServiceBinder(vertx);
binder.setAddress(MKSCOREBOARDS_QUEUE).register(DatabaseService.class, ready.result());
promise.complete();
} else {
promise.fail(ready.cause());
}
});
}
private void initQueries() {
if (queries.size() == 0) {
queries.put(
SqlQuery.LIST_AVAILABLE_GAMES,
"SELECT * FROM games;"
);
}
}
}
Here's the database service, taken from "Gentle guide to Vertx application"
@ProxyGen
@VertxGen
public interface DatabaseService {
@GenIgnore
static DatabaseService create(MySQLPool dbClient, HashMap<SqlQuery, String> sqlQueries, Handler<AsyncResult<DatabaseService>> readyHandler) {
return new DatabaseServiceImpl(sqlQueries, dbClient, readyHandler);
}
@GenIgnore
static DatabaseService createProxy(Vertx vertx, String address) {
return new DatabaseServiceVertxEBProxy(vertx, address);
}
@Fluent
DatabaseService listAllGames(Handler<AsyncResult<JsonArray>> resultHandler);
}
and
public class DatabaseServiceImpl implements DatabaseService {
private final HashMap<SqlQuery, String> sqlQueries;
private final MySQLPool client;
public DatabaseServiceImpl(HashMap<SqlQuery, String> sqlQueries, MySQLPool client, Handler<AsyncResult<DatabaseService>> readyHandler) {
this.sqlQueries = sqlQueries;
this.client = client;
client.getConnection(result -> {
if (result.failed()) {
readyHandler.handle(Future.failedFuture(result.cause()));
} else {
readyHandler.handle(Future.succeededFuture(this));
}
});
}
@Override
public DatabaseService listAllGames(Handler<AsyncResult<JsonArray>> resultHandler) {
//TODO
return null;
}
}
I have ended up concluding that this error is related to a bug in Vert.x; I now have this error:
java.lang.NoClassDefFoundError: io/vertx/core/impl/NetSocketInternal
at io.vertx.mysqlclient.impl.MySQLConnectionFactory.lambda$connect$0(MySQLConnectionFactory.java:114)
And the pom is now the following (compare it with the pom in the question).
<dependencies>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-auth-jwt</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-junit5</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-core</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-mysql-client</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-web</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-junit5</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-web-templ-freemarker</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-service-proxy</artifactId>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-codegen</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
There must be issues with Vert.x itself, because all the versions are the same; I am using Vert.x 4.
Use
<!-- https://mvnrepository.com/artifact/io.vertx/vertx-mysql-client -->
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-mysql-client</artifactId>
<version>4.1.2</version>
</dependency>
or the latest.
Can you check that the various versions of the Vert.x components you are using are the same?
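One common way to keep them aligned is to import the Vert.x BOM in dependencyManagement and drop the per-artifact versions. A sketch of what that could look like (4.1.2 here is only an example version):
<dependencyManagement>
    <dependencies>
        <!-- Import the Vert.x BOM so that every io.vertx artifact resolves to the same version -->
        <dependency>
            <groupId>io.vertx</groupId>
            <artifactId>vertx-stack-depchain</artifactId>
            <version>4.1.2</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>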

Thymeleaf custom dialect doesn't work

My custom dialect with a processor doesn't parse any value, and I don't know why. In the generated view there is nothing in the place where ${content} should be, and after changing the tag to th:text it appears. I'm using Spring Boot v1.5.9.RELEASE, Spring v4.3.13.RELEASE.
pom.xml dependencies (it's a submodule)
<properties>
<h2.version>1.4.194</h2.version>
<java-version>1.8</java-version>
<org.thymeleaf-version>3.0.9.RELEASE</org.thymeleaf-version>
<org.thymeleaf.extras-version>3.0.0.RELEASE</org.thymeleaf.extras-version>
<thymeleaf-layout-dialect.version>2.1.2</thymeleaf-layout-dialect.version>
</properties>
<dependencies>
<dependency>
<groupId>org.thymeleaf</groupId>
<artifactId>thymeleaf</artifactId>
<version>${org.thymeleaf-version}</version>
</dependency>
<dependency>
<groupId>org.thymeleaf</groupId>
<artifactId>thymeleaf-spring4</artifactId>
<version>${org.thymeleaf-version}</version>
</dependency>
<dependency>
<groupId>nz.net.ultraq.thymeleaf</groupId>
<artifactId>thymeleaf-layout-dialect</artifactId>
<version>${thymeleaf-layout-dialect.version}</version>
</dependency>
<dependency>
<groupId>org.thymeleaf.extras</groupId>
<artifactId>thymeleaf-extras-java8time</artifactId>
<version>${org.thymeleaf.extras-version}</version>
</dependency>
<!--WebJars-->
<dependency>
<groupId>org.webjars</groupId>
<artifactId>bootstrap</artifactId>
<version>3.3.7</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>jquery</artifactId>
<version>3.2.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
</dependency>
<!--database-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>test</scope>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-java8</artifactId>
<version>${hibernate.version}</version>
</dependency>
</dependencies>
LineSeparatorProcessor.java
public class LineSeparatorProcessor extends AbstractAttributeTagProcessor {
private static final String ATTR_NAME = "lstext";
private static final int PRECEDENCE = 10000;
public LineSeparatorProcessor(final String dialectPrefix) {
super(
TemplateMode.HTML,
dialectPrefix,
null,
false,
ATTR_NAME,
true,
PRECEDENCE,
true);
}
protected void doProcess(
final ITemplateContext context, final IProcessableElementTag tag,
final AttributeName attributeName, final String attributeValue,
final IElementTagStructureHandler structureHandler) {
final IEngineConfiguration configuration = context.getConfiguration();
final IStandardExpressionParser parser =
StandardExpressions.getExpressionParser(configuration);
final IStandardExpression expression = parser.parseExpression(context, attributeValue);
final String value = (String) expression.execute(context);
structureHandler.setBody(
HtmlEscape.escapeHtml5Xml(value).replace(System.getProperty("line.separator"), "<br />"),
false);
}
}
MyDialect.java
public class MyDialect extends AbstractProcessorDialect {
public MyDialect() {
super(
"MyDialect",
"mydialect",
13000);
}
public Set<IProcessor> getProcessors(final String dialectPrefix){
final Set<IProcessor> processors = new HashSet<>();
processors.add( new LineSeparatorProcessor(dialectPrefix) );
return processors;
}
}
ThymeleafConfiguration.java
@Configuration
public class ThymeleafConfiguration {
@Bean
public MyDialect myDialect() {
return new MyDialect();
}
}
view.html
<span mydialect:lstext="${content}" ></span>
You need to add the dialect to the instance of the TemplateEngine. For example:
@Bean
public SpringTemplateEngine templateEngine(){
SpringTemplateEngine templateEngine = new SpringTemplateEngine();
templateEngine.setEnableSpringELCompiler(true);
templateEngine.setTemplateResolver(templateResolver());
templateEngine.addDialect(new MyDialect());
return templateEngine;
}
You can find this documented in the Say Hello! Extending Thymeleaf in 5 minutes guide.
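For completeness, the templateResolver() referenced above is not shown in the answer. A minimal sketch of such a bean, assuming HTML templates under classpath:/templates/ and using org.thymeleaf.spring4.templateresolver.SpringResourceTemplateResolver, could look like this:
@Bean
public SpringResourceTemplateResolver templateResolver(ApplicationContext applicationContext) {
    SpringResourceTemplateResolver templateResolver = new SpringResourceTemplateResolver();
    // The resolver needs the application context to resolve classpath: resources.
    templateResolver.setApplicationContext(applicationContext);
    templateResolver.setPrefix("classpath:/templates/");
    templateResolver.setSuffix(".html");
    templateResolver.setTemplateMode(TemplateMode.HTML);
    return templateResolver;
}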

Spark: java.lang.NoClassDefFoundError: com/mongodb/hadoop/MongoInputFormat

I'm trying to read data from MongoDB with Spark using the mongo-hadoop connector.
I tried different versions of the mongo-hadoop connector jar but am still getting this error.
There is no error at compile time.
What can I do to resolve this?
Thanks in advance.
Exception in thread "main" java.lang.NoClassDefFoundError: com/mongodb/hadoop/MongoInputFormat
at com.geekcap.javaworld.wordcount.Mongo.main(Mongo.java:47)
Caused by: java.lang.ClassNotFoundException: com.mongodb.hadoop.MongoInputFormat
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 1 more
My Code
import com.mongodb.hadoop.BSONFileOutputFormat;
import com.mongodb.hadoop.MongoInputFormat;
import com.mongodb.hadoop.MongoOutputFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.Queue;
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.bson.BSONObject;
public class MongoTest {
// Set configuration options for the MongoDB Hadoop Connector.
public static void main(String[] args) {
SparkConf conf = new SparkConf().setMaster("local").setAppName("App1");
JavaSparkContext sc = new JavaSparkContext(conf);
Configuration mongodbConfig;
mongodbConfig = new Configuration();
mongodbConfig.set("mongo.job.input.format", "com.mongodb.hadoop.MongoInputFormat");
mongodbConfig.set("mongo.input.uri","mongodb://localhost:27017/MyCollectionName.collection");
JavaPairRDD<Object, BSONObject> documents = sc.newAPIHadoopRDD(
mongodbConfig, // Configuration
MongoInputFormat.class, // InputFormat: read from a live cluster.
Object.class, // Key class
BSONObject.class // Value class
);
documents.saveAsTextFile("b.txt");
}
}
pom.xml dependencies:
<!-- Import Spark -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>1.4.0</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver</artifactId>
<version>3.0.4</version>
</dependency>
<dependency>
<groupId>hadoopCom</groupId>
<artifactId>com.sample</artifactId>
<version>1.0</version>
<scope>system</scope>
<systemPath>/home/sys6002/NetBeansProjects/WordCount/lib/hadoop-common-2.7.1.jar</systemPath>
</dependency>
<dependency>
<groupId>hadoopCon1</groupId>
<artifactId>com.sample1</artifactId>
<version>1.0</version>
<scope>system</scope>
<systemPath>/home/sys6002/Downloads/mongo-hadoop-core-1.3.0.jar</systemPath>
</dependency>
</dependencies>
After several trials and changes, I got this to work.
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>1.5.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>1.5.1</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.14</version>
</dependency>
<dependency>
<groupId>org.mongodb.mongo-hadoop</groupId>
<artifactId>mongo-hadoop-core</artifactId>
<version>1.4.1</version>
</dependency>
</dependencies>
Java code
Configuration conf = new Configuration();
conf.set("mongo.job.input.format", "com.mongodb.hadoop.MongoInputFormat");
conf.set("mongo.input.uri", "mongodb://localhost:27017/databasename.collectionname");
SparkConf sconf = new SparkConf().setMaster("local").setAppName("Spark UM Jar");
JavaSparkContext sc = new JavaSparkContext(sconf);
JavaRDD<User> UserMaster = sc.newAPIHadoopRDD(conf, MongoInputFormat.class, Object.class, BSONObject.class)
    .map(new Function<Tuple2<Object, BSONObject>, User>() {
        @Override
        public User call(Tuple2<Object, BSONObject> v1) throws Exception {
            // build and return a User from the BSONObject here
            return null;
        }
    });
