I'm always getting the following error. Can somebody help me, please?
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/Logging
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at com.datastax.spark.connector.japi.DStreamJavaFunctions.<init>(DStreamJavaFunctions.java:24)
at com.datastax.spark.connector.japi.CassandraStreamingJavaUtil.javaFunctions(CassandraStreamingJavaUtil.java:55)
at SparkStream.main(SparkStream.java:51)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.Logging
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 20 more
I get it when I compile and run the following code. I've searched the web but didn't find a solution. The error appeared when I added the saveToCassandra call.
import com.datastax.spark.connector.japi.CassandraStreamingJavaUtil;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapToRow;
/**
* Created by jonas on 10/10/16.
*/
public class SparkStream implements Serializable{
public static void main(String[] args) throws Exception{
SparkConf conf = new SparkConf(true)
.setAppName("TwitterToCassandra")
.setMaster("local[*]")
.set("spark.cassandra.connection.host", "127.0.0.1")
.set("spark.cassandra.connection.port", "9042");
;
JavaSparkContext sc = new JavaSparkContext(conf);
JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(5000));
Map<String, String> kafkaParams = new HashMap<>();
kafkaParams.put("bootstrap.servers", "localhost:9092");
Set<String> topics = Collections.singleton("Test");
JavaPairInputDStream<String, String> directKafkaStream = KafkaUtils.createDirectStream(
ssc,
String.class,
String.class,
kafka.serializer.StringDecoder.class,
kafka.serializer.StringDecoder.class,
kafkaParams,
topics
);
JavaDStream<Tweet> createTweet = directKafkaStream.map(s -> createTweet(s._2()));
CassandraStreamingJavaUtil.javaFunctions(createTweet)
.writerBuilder("mykeyspace", "rawtweet", mapToRow(Tweet.class))
.saveToCassandra();
ssc.start();
ssc.awaitTermination();
}
public static Tweet createTweet(String rawKafka){
String[] splitted = rawKafka.split("\\|");
Tweet t = new Tweet(splitted[0], splitted[1], splitted[2], splitted[3]);
return t;
}
}
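The Tweet class referenced above is not shown in the question. A minimal sketch compatible with the four-argument constructor used in createTweet might look like the following; the property names are assumptions and must match the columns of mykeyspace.rawtweet, since mapToRow(Tweet.class) maps JavaBean properties to column names:

import java.io.Serializable;

public class Tweet implements Serializable {
    // Placeholder properties -- rename to match the real rawtweet schema.
    private String id;
    private String author;
    private String text;
    private String date;

    public Tweet() { } // no-arg constructor for the bean mapper

    public Tweet(String id, String author, String text, String date) {
        this.id = id;
        this.author = author;
        this.text = text;
        this.date = date;
    }

    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
    public String getAuthor() { return author; }
    public void setAuthor(String author) { this.author = author; }
    public String getText() { return text; }
    public void setText(String text) { this.text = text; }
    public String getDate() { return date; }
    public void setDate(String date) { this.date = date; }
}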
My pom is the following.
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.company</groupId>
<artifactId>Sentiment</artifactId>
<version>1.0-SNAPSHOT</version>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>twitter4j.org</id>
<name>twitter4j.org Repository</name>
<url>http://twitter4j.org/maven2</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>2.0.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.10</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
<version>2.0.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.scala-lang/scala-library -->
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.11.8</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.datastax.spark/spark-cassandra-connector_2.10 -->
<dependency>
<groupId>com.datastax.spark</groupId>
<artifactId>spark-cassandra-connector_2.10</artifactId>
<version>1.6.2</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.10</artifactId>
<version>0.9.0.0</version>
</dependency>
<dependency>
<groupId>org.twitter4j</groupId>
<artifactId>twitter4j-core</artifactId>
<version>[4.0,)</version>
</dependency>
<dependency>
<groupId>org.twitter4j</groupId>
<artifactId>twitter4j-stream</artifactId>
<version>4.0.4</version>
</dependency>
<dependency>
<groupId>org.twitter4j</groupId>
<artifactId>twitter4j-async</artifactId>
<version>4.0.4</version>
</dependency>
</dependencies>
</project>
org.apache.spark.Logging is available in Spark 1.5.2 and earlier; it is not in 2.0.0. Please change the versions as follows:
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.10</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
<version>1.6.2</version>
</dependency>
The error occurs because you are using Spark 2.0 libraries with the connector built for Spark 1.6 (which looks for the Spark 1.6 logging class). Use the 2.0.5 version of the connector.
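For example, a sketch of the connector coordinates aligned with the Spark 2.0.x artifacts already in the pom (note that the Scala suffix must match across all Spark artifacts; _2.11 is assumed here):

<dependency>
    <groupId>com.datastax.spark</groupId>
    <artifactId>spark-cassandra-connector_2.11</artifactId>
    <version>2.0.5</version>
</dependency>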
It's because the org.apache.spark.Logging class is missing after 1.5.2, just as everybody says (only org.apache.spark.internal.Logging exists in later versions).
Since none of the Maven-based solutions seemed to resolve this dependency, I added the class to the lib directory manually. Here is how I fixed the problem:
Package the Scala class org.apache.spark.internal.Logging into a public jar, or download it from https://raw.githubusercontent.com/swordsmanliu/SparkStreamingHbase/master/lib/spark-core_2.11-1.5.2.logging.jar (thanks to this host).
Move the jar into your Spark cluster's jars directory.
Submit your project again; hopefully it helps.
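For example, assuming a Spark 2.x directory layout (the jar path is an assumption):

cp spark-core_2.11-1.5.2.logging.jar $SPARK_HOME/jars/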
I got the solution by changing the jar mentioned above.
Initially, I had an outdated jar for spark-streaming-kafka:
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
<version>2.1.1</version>
</dependency>
I also removed multiple slf4j-log4j and log4j jars that I had added externally alongside the Spark and Kafka jar libraries.
If you're using IntelliJ, just check the 'Include dependencies with provided scope' box; that will fix the issue for you without mucking around with the pom or manually downloading the files.
Download spark-core_2.11-1.5.2.logging.jar and pass it with the --jars option:
spark-submit --class com.SentimentTwiteer --packages "org.apache.spark:spark-streaming-twitter_2.11:1.6.3" --jars /root/Desktop/spark-core_2.11-1.5.2.logging.jar /root/Desktop/SentimentTwiteer.jar consumerKey consumerSecret accessToken accessTokenSecret yoursearchTag
https://github.com/sinhavicky4/SentimentTwiteer
One reason that may cause this problem is lib and class conflict.
I faced this problem and solved it using some maven exclusions:
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>2.0.0</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>2.0.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
<version>2.0.0</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
This pom.xml resolved my issue:
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>1.6.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.10</artifactId>
<version>1.6.1</version>
</dependency>
Download the jar and use --jars in spark-submit; it worked for me:
spark-submit --class com.SentimentTwiteer --packages "org.apache.spark:spark-streaming-twitter_2.11:1.6.3" --jars /root/Desktop/spark-core_2.11-1.5.2.logging.jar /root/Desktop/SentimentTwiteer.jar XX XX XX XX
Download the jar below, add it to your library, and it will work as expected.
https://raw.githubusercontent.com/swordsmanliu/SparkStreamingHbase/master/lib/spark-core_2.11-1.5.2.logging.jar
It's a version issue; try with a more recent version:
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>2.1.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
<version>2.1.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>2.1.0</version>
</dependency>
The logging jar is missing from your list of dependency jars. Try to download the "spark-core_2.11-1.5.2.logging" jar from the mvn repository, then add it as an external jar to your Spark project, and you won't get the "java.lang.NoClassDefFoundError: org/apache/spark/Logging" error.
Download the jar matching your Scala version (2.10, 2.11, etc.).
When I start a simple akka-http-server, I get an exception. I searched all over the internet and found two suggested solutions, neither of which works for me:
Mixing Scala versions (I checked my pom; it uses Scala 2.11 throughout, with no mixing).
Changing the Akka actor version (I tried almost ten versions with Scala 2.11).
I have no idea how to resolve this problem; please tell me how to fix it.
application.conf
akka {
actor {
provider = "akka.remote.RemoteActorRefProvider"
}
remote {
enabled-transports = ["akka.remote.netty.tcp"]
netty.tcp {
hostname = "127.0.0.1"
port = 2552
}
}
}
main class
package com.akkademo;
import akka.actor.ActorSystem;
import akka.actor.Props;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
public class Main {
public static void main(String[] args) {
ActorSystem actorSystem = ActorSystem.create("akkademo", ConfigFactory.load());
Config config = actorSystem.settings().config();
actorSystem.actorOf(Props.create(AkkademoDb.class), "akkademo-db");
}
}
pom.xml
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<jdk.version>1.8</jdk.version>
<scala>2.11</scala>
<scala.version>2.11.8</scala.version>
<akka.version>2.4.14</akka.version>
<akka.remote>2.3.6</akka.remote>
<akka.compat>0.7.0</akka.compat>
<akka.test>2.5.9</akka.test>
</properties>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>RELEASE</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-testkit_${scala}</artifactId>
<version>${akka.test}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_${scala}</artifactId>
<version>${akka.remote}</version>
</dependency>
<dependency>
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-java8-compat_${scala}</artifactId>
<version>${akka.compat}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_${scala}</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_${scala}</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
</dependency>
</dependencies>
<repositories>
<repository>
<id>aliyun</id>
<name>aliyun</name>
<url>http://maven.aliyun.com/nexus/content/groups/public</url>
</repository>
</repositories>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${jdk.version}</source>
<target>${jdk.version}</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
Exception in thread "main" java.lang.NoSuchMethodError:
akka.actor.LocalActorRefProvider.log()Lakka/event/LoggingAdapter;
at akka.remote.RemoteActorRefProvider.<init>(RemoteActorRefProvider.scala:128)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(ReflectiveDynamicAccess.scala:32)
at scala.util.Try$.apply(Try.scala:192)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(ReflectiveDynamicAccess.scala:27)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(ReflectiveDynamicAccess.scala:38)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(ReflectiveDynamicAccess.scala:38)
at scala.util.Success.flatMap(Try.scala:231)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(ReflectiveDynamicAccess.scala:38)
at akka.actor.ActorSystemImpl.liftedTree1$1(ActorSystem.scala:620)
at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:613)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:142)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:119)
at akka.actor.ActorSystem$.create(ActorSystem.scala:67)
at akka.actor.ActorSystem.create(ActorSystem.scala)
at com.akkademo.Main.main(Main.java:15)
Your issue is probably caused by using different minor versions of akka-remote and the core akka-actor package.
Set the akka.version and akka.remote properties to the same value. The current release is 2.5.19.
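For example, in the properties block of the pom above, aligned to the release mentioned:

<akka.version>2.5.19</akka.version>
<akka.remote>2.5.19</akka.remote>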
I am trying to build a REST webservice using an asynchronous response.
I have looked around for this error on the web; however, none of the solutions have worked for me, and I am not sure how to go about it.
This is the code for the REST service; it uses AsyncResponse and @Suspended, which come from the jar specified in the pom.xml that I provide below. The problem is that on deploying the war, I get an exception:
java.lang.AbstractMethodError: javax.ws.rs.core.UriBuilder.uri(Ljava/lang/String;)Ljavax/ws/rs/core/UriBuilder;
javax.ws.rs.core.UriBuilder.fromUri(UriBuilder.java:119)
com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:651)
javax.servlet.http.HttpServlet.service(HttpServlet.java:728)
org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52)
Note: The full stack trace of the root cause is available in the Apache Tomcat/7.0.50 logs.
My class is as follows:
package com.crudapp;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import javax.annotation.Generated;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
//import javax.ws.rs.core.UriBuilder;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.google.gson.Gson;
import com.mysql.jdbc.StringUtils;
import dao.User;
import dao.UserDAO;
import dao.UserDAOImpl;
import javax.ws.rs.container.AsyncResponse;
import javax.ws.rs.container.Suspended;
#Path("/crudpath")
public class EntityResource {
private final ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("/spring.xml");
UserDAO userdao = null;
private final int numOfThreads = 10;
private final ExecutorService executorService = Executors.newFixedThreadPool(numOfThreads);
// userdao.getUsers("118");
//ctx.close();
@GET
@Produces("application/json")
public Response getTupleFromDBasJSON(@QueryParam("param1") String userid, @Suspended final AsyncResponse asyncresponse) {
if(StringUtils.isNullOrEmpty(userid))
throw new ServiceException("Userid passed to the REST service /crudpath is null or empty");
userdao = (userdao==null)?
ctx.getBean("userDAO", UserDAOImpl.class)
: userdao;
Gson gson = new Gson();
Future<List<User>> futures = executorService.submit(new DAOTaskHandlerThread(userid));
List <User> users = new ArrayList<User>();
if(futures.isDone())
{
try{
users = futures.get();
if(users!= null)
return Response.status(200).entity( gson.toJson(users).toString()).build();
}
catch(Exception ex)
{
throw new ServiceException(ex);
}
}
return Response.status(200).entity(new ArrayList<User>().toString()).build();
/*// crrate a new thread.. call the DAO .. returns the result from here.
JSONObject jsonObject = new JSONObject();
jsonObject.put("key", "value");
return Response.status(200).entity( jsonObject.toString()).build();*/
}
private class DAOTaskHandlerThread implements Callable<List<User>>{
//private UserDAO userDAO;
private String userid;
private DAOTaskHandlerThread(//UserDAO userDAO,
String useridpassed){
///this.userDAO= userDAO;
userid= useridpassed;
}
@Override
public List<User> call() throws Exception {
// TODO Auto-generated method stub
return userdao.getUsers(userid);
}
}
}
My pom.xml file for Maven is as follows:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>RESTJerseyExample</groupId>
<artifactId>RESTJerseyExample</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>war</packaging>
<build>
<sourceDirectory>src</sourceDirectory>
<plugins>
<plugin>
<artifactId>maven-war-plugin</artifactId>
<version>2.4</version>
<configuration>
<warSourceDirectory>WebContent</warSourceDirectory>
<failOnMissingWebXml>false</failOnMissingWebXml>
</configuration>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
<!-- spring framework just added -->
<properties>
<java-version>1.7</java-version>
<org.springframework-version>4.0.3.RELEASE</org.springframework-version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${org.springframework-version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>${org.springframework-version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>${org.springframework-version}</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<!-- spring framework just added ends here -->
<dependency>
<groupId>asm</groupId>
<artifactId>asm</artifactId>
<version>3.3.1</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-bundle</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20140107</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
<version>2.0</version>
</dependency>
<!-- used for httpclient library -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.3.2</version>
</dependency>
<!-- for async response -->
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
<version>2.0-m12</version>
</dependency>
</dependencies>
</project>
An AbstractMethodError is thrown when an application tries to call an abstract method that has no implementation at runtime.
uri is an abstract method in UriBuilder, so you need an implementation of it. This method (with a String parameter) comes from version 2.0 of the JAX-RS specification.
You're trying to use JAX-RS 2.0 with Jersey 1.*. Instead, you need Jersey 2.*, which implements JAX-RS 2.0 and therefore provides an implementation of the uri method.
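A minimal sketch that reproduces the mismatch, assuming the JAX-RS 2.0 API jar is on the classpath together with a Jersey 1.x implementation (the URI value is a placeholder):

import javax.ws.rs.core.UriBuilder;

public class UriBuilderCheck {
    public static void main(String[] args) {
        // In the JAX-RS 2.0 API, fromUri(String) delegates to the abstract
        // uri(String) method introduced in 2.0. Jersey 1.x's UriBuilderImpl
        // predates that method, so the call fails with AbstractMethodError
        // at runtime even though everything compiled fine.
        System.out.println(UriBuilder.fromUri("http://localhost:8080/crudapp").build());
    }
}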
In your pom.xml you may remove these dependencies:
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-bundle</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
<version>2.0-m12</version>
</dependency>
And use these dependencies:
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
<version>2.17</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<version>2.17</version>
</dependency>
With these, the uri method is implemented by the JerseyUriBuilder class from jersey-common.
EDIT:
You need to change, in your web.xml, the servlet class com.sun.jersey.spi.container.servlet.ServletContainer to org.glassfish.jersey.servlet.ServletContainer, and the init-param name from com.sun.jersey.config.property.packages to jersey.config.server.provider.packages.
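A sketch of the updated web.xml entry under those assumptions (the servlet name is arbitrary; the package value is taken from the question's code):

<servlet>
    <servlet-name>jersey-servlet</servlet-name>
    <servlet-class>org.glassfish.jersey.servlet.ServletContainer</servlet-class>
    <init-param>
        <param-name>jersey.config.server.provider.packages</param-name>
        <param-value>com.crudapp</param-value>
    </init-param>
    <load-on-startup>1</load-on-startup>
</servlet>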
I would like to add one answer to this post. I faced a similar problem today and found the root cause to be another dependent jar that was internally using an older version of Jersey/JAX-RS.
My POM before fix was:
<jersey.version>2.17</jersey.version>
...
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.ci.wrapper</groupId>
<artifactId>client-wrapper</artifactId>
<version>${clients-wrapper.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.api.commons</groupId>
<artifactId>transferobjects</artifactId>
<version>3.0.2</version>
</dependency>
The problem was with "com.ci.wrapper" and "com.api.commons".
They in turn included two jars, brave-jersey and org.apache.cxf:cxf-rt-frontend-jaxrs (2.5.1), which were using Jersey and JAX-RS 1.x versions.
After excluding the nested jars and adding the newer versions, brave-jersey2 and cxf-rt-frontend-jaxrs (3.1.5), the problem was resolved.
<dependency>
<groupId>com.api.commons</groupId>
<artifactId>transferobjects</artifactId>
<version>3.0.2</version>
<exclusions>
<exclusion>
<artifactId>cxf-rt-frontend-jaxrs</artifactId>
<groupId>org.apache.cxf</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-jaxrs</artifactId>
<version>3.1.5</version>
</dependency>
<dependency>
<groupId>com.ci.wrapper</groupId>
<artifactId>client-wrapper</artifactId>
<version>${clients-wrapper.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
<exclusion>
<artifactId>brave-jersey</artifactId>
<groupId>com.github.kristofa</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.github.kristofa</groupId>
<artifactId>brave-jersey2</artifactId>
<version>2.4.2</version>
</dependency>
If you're facing a similar issue, check whether any projects or jars you include could be using an incompatible version of Jersey/JAX-RS.
In my case, it was the combination of the cxf-rt-frontend-jaxrs and httpclient jars that needed to be removed to resolve the issue. Both of these jars contain the class javax.ws.rs.core.UriBuilder, and the multiple versions of this class caused the problem.
My pom had a transitive dependency on both of these jars; after removing them, it worked.
<exclusion>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</exclusion>
I faced this problem when trying to use a custom-built library in one of my projects.
The problem was a mismatch in the com.sun.jersey dependencies: my project was using Jersey 2.15, whereas the custom library transitively pulled in com.sun.jersey 1.17.
So how do you find such problems?
Use the Gradle dependencies task to inspect the dependency tree (it shows nested results, so all transitive dependencies are visible).
Once you identify the problem-causing dependencies, exclude them while adding the required dependencies to the project.
For example:
httpRestClient is the name of the custom library I wanted to use in my project.
Here is how I added the dependency while excluding the conflicting dependencies of group 'com.sun.jersey':
compile(httpRestClient) {
exclude group: 'com.sun.jersey'
}
This way you can use whatever library you need and exclude the conflicting libraries.
Thanks.
In our case, the culprit was this dependency:
<dependency>
<groupId>org.apache.wink</groupId>
<artifactId>wink-common</artifactId>
<version>1.0-incubating</version>
</dependency>
I am writing a Spark application in Scala that listens to a Kafka topic. The application just prints out the messages it receives, that's all. I am running this locally on my machine...
......
import org.apache.spark.SparkConf
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
.....
val topics = "topicName";//args(2)
val numThreads = 1;
val zkQuorum = "zookeeper quorum"
val group = "groupName"
//val inputTable = args(5)
// Set application name (when we run this on the cluster this is what will appear in the spark UI)
val sparkConf = new SparkConf().setAppName("appName").setMaster("local")
// On what topics shall I listen? Create an array of topics to pass to the stream configurations...
val topicMap = topics.split(",").map((_, numThreads.toInt)).toMap
// Will poll every X seconds
val ssc = new StreamingContext(sparkConf, Seconds(15))
// Create stream (has not been triggered yet)
val stream = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap).map(_._2)
// For each stream!
stream.foreachRDD { rdd =>
// Check if empty...
if (rdd.toLocalIterator.nonEmpty)
{
// Create SQL Context out of the streaming
val sqlContext = new SQLContext(ssc.sparkContext)
// Register a temporary table from the received RDD
sqlContext.read.json(rdd).registerTempTable("resultRecieved")
// Create SQL Context, and you can filter
val result = sqlContext.sql(
"select * FROM resultRecieved")
if(result.count() == 0 )
println("No results!")
else{
println(result.count() + " Results!")
}
}
}
// Print stream
stream.print
// Trigger!
ssc.start()
// Await stopping of the service...
ssc.awaitTermination()
I am using Scala 2.10.4, and here is my Maven configuration:
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.10</artifactId>
<version>0.8.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.10</artifactId>
<version>1.0.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>1.5.1</version>
</dependency>
<dependency>
<groupId>com.typesafe</groupId>
<artifactId>config</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka_2.10</artifactId>
<version>0.9.0-incubating</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.10</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-swing</artifactId>
<version>2.10.4</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.10.1</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_2.10</artifactId>
<version>2.2.1</version>
</dependency>
</dependencies>
With scala.version being 2.10.1, and spark.version being 1.5.1.
I am getting
Exception in thread "main" java.lang.NoClassDefFoundError: akka/util/Helpers$ConfigOps$
at akka.remote.RemoteSettings.<init>(RemoteSettings.scala:49)
at akka.remote.RemoteActorRefProvider.<init>(RemoteActorRefProvider.scala:114)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
at scala.util.Try$.apply(Try.scala:161)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
at scala.util.Success.flatMap(Try.scala:200)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:550)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1913)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1904)
at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
at org.apache.spark.rpc.akka.AkkaRpcEnvFactory.create(AkkaRpcEnv.scala:253)
at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:252)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:193)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:549)
at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:75)
at SIVConsumer$.main(SIVConsumer.scala:33)
at SIVConsumer.main(SIVConsumer.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:144)
Caused by: java.lang.ClassNotFoundException: akka.util.Helpers$ConfigOps$
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 38 more
Process finished with exit code 1
Any idea which dependencies aren't getting along well together? I have tried using the latest Scala binaries but it still fails.
I guess the problem is with the spark-streaming version. It should be the same as spark-core (1.5.2), and the same goes for spark-streaming-kafka_2.10.
You can refer to a working pom.xml in my sample project: https://github.com/atulsm/Test_Projects/blob/master/pom.xml
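Concretely, a sketch of the aligned versions based on the advice above (verify against the Spark release you actually run):

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming_2.10</artifactId>
    <version>1.5.2</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka_2.10</artifactId>
    <version>1.5.2</version>
</dependency>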
The issue was incorrect versions of spark-streaming. For others, the following Maven dependency list allows you to:
Listen to a kafka topic
Create a spark stream
Write to an hbase table
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.10.4</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>1.4.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.10</artifactId>
<version>1.4.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka_2.10</artifactId>
<version>1.4.1</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.10</artifactId>
<version>0.8.2.2</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20150729</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>1.1.1</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<version>1.1.1</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>1.1.1</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>1.8.3</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>1.8.3</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.10</artifactId>
<version>1.4.1</version>
</dependency>
I am working through the kafka quickstart:
http://kafka.apache.org/07/quickstart.html
and the basic Consumer Group example:
https://cwiki.apache.org/confluence/display/KAFKA/Consumer+Group+Example
I have coded up the Consumer and ConsumerThreadPool as follows:
import kafka.consumer.KafkaStream;
import kafka.consumer.ConsumerIterator;
public class Consumer implements Runnable {
private KafkaStream m_stream;
private Integer m_threadNumber;
public Consumer(KafkaStream a_stream, Integer a_threadNumber) {
m_threadNumber = a_threadNumber;
m_stream = a_stream;
}
public void run() {
ConsumerIterator<byte[], byte[]> it = m_stream.iterator();
while (it.hasNext()) {
System.out.println("Thread " + m_threadNumber + ": " + new String(it.next().message()));
}
System.out.println("Shutting down Thread: " + m_threadNumber);
}
}
A couple of other facets: I am using Spring to manage my ZooKeeper consumer configuration:
import javax.inject.Named;
import java.util.Properties;
import kafka.consumer.ConsumerConfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@Configuration
@ComponentScan("com.truecar.inventory.worker.core")
public class AppConfig {
@Bean
@Named("consumerConfig")
private static ConsumerConfig createConsumerConfig() {
String zookeeperAddress = "127.0.0.1:2181";
String groupId = "inventory";
Properties props = new Properties();
props.put("zookeeper.connect", zookeeperAddress);
props.put("group.id", groupId);
props.put("zookeeper.session.timeout.ms", "400");
props.put("zookeeper.sync.time.ms", "200");
props.put("auto.commit.interval.ms", "1000");
return new ConsumerConfig(props);
}
}
And I am compiling with Maven and the OneJar Maven plugin. However, when I compile and then run the resulting jar, I get the following error:
Aug 26, 2013 6:15:41 PM org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider registerDefaultFilters
INFO: JSR-330 'javax.inject.Named' annotation found and supported for component scanning
Exception in thread "main" java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.simontuffs.onejar.Boot.run(Boot.java:340)
at com.simontuffs.onejar.Boot.main(Boot.java:166)
Caused by: java.lang.NoClassDefFoundError: scala/ScalaObject
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:792)
at com.simontuffs.onejar.JarClassLoader.defineClass(JarClassLoader.java:803)
at com.simontuffs.onejar.JarClassLoader.findClass(JarClassLoader.java:710)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at com.simontuffs.onejar.JarClassLoader.loadClass(JarClassLoader.java:630)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.getDeclaredMethods0(Native Method)
at java.lang.Class.privateGetDeclaredMethods(Class.java:2521)
at java.lang.Class.getDeclaredMethods(Class.java:1845)
at org.springframework.core.type.StandardAnnotationMetadata.getAnnotatedMethods(StandardAnnotationMetadata.java:180)
at org.springframework.context.annotation.ConfigurationClassParser.doProcessConfigurationClass(ConfigurationClassParser.java:222)
at org.springframework.context.annotation.ConfigurationClassParser.processConfigurationClass(ConfigurationClassParser.java:165)
at org.springframework.context.annotation.ConfigurationClassParser.parse(ConfigurationClassParser.java:140)
at org.springframework.context.annotation.ConfigurationClassPostProcessor.processConfigBeanDefinitions(ConfigurationClassPostProcessor.java:282)
at org.springframework.context.annotation.ConfigurationClassPostProcessor.postProcessBeanDefinitionRegistry(ConfigurationClassPostProcessor.java:223)
at org.springframework.context.support.AbstractApplicationContext.invokeBeanFactoryPostProcessors(AbstractApplicationContext.java:630)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:461)
at org.springframework.context.annotation.AnnotationConfigApplicationContext.<init>(AnnotationConfigApplicationContext.java:73)
at com.truecar.inventory.worker.core.consumer.ConsumerThreadPool.<clinit>(ConsumerThreadPool.java:31)
at com.truecar.inventory.worker.core.application.Starter.main(Starter.java:20)
... 6 more
Caused by: java.lang.ClassNotFoundException: scala.ScalaObject
at com.simontuffs.onejar.JarClassLoader.findClass(JarClassLoader.java:713)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at com.simontuffs.onejar.JarClassLoader.loadClass(JarClassLoader.java:630)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 27 more
Now, I know little about Kafka and nothing about Scala. How do I fix this? What should I try next? Is this a known issue? Do I need other dependencies? Here is the Kafka version in my pom.xml:
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.9.2</artifactId>
<version>0.8.0-beta1</version>
</dependency>
Update: I contacted the Kafka dev mailing list, and they let me know some specific version requirements for the Scala dependencies. However, there is also an undocumented log4j dependency, which results in another runtime (not compile-time) exception.
Exception in thread "main" java.lang.reflect.InvocationTargetException
Caused by: java.lang.NoSuchMethodError: ch.qos.logback.classic.Logger.filterAndLog(Ljava/lang/String;Lorg/slf4j/Marker;Lch/qos/logback/classic/Level;Ljava/lang/String;[Ljava/lang/Object;Ljava/lang/Throwable;)V
at org.apache.log4j.Category.log(Category.java:333)
at org.apache.commons.logging.impl.Log4JLogger.debug(Log4JLogger.java:177)
Another Update:
I found the correct log4j dependency:
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
But now I am met with an even more cryptic runtime exception:
Exception in thread "main" java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.simontuffs.onejar.Boot.run(Boot.java:340)
at com.simontuffs.onejar.Boot.main(Boot.java:166)
Caused by: java.lang.NoClassDefFoundError: org/I0Itec/zkclient/IZkStateListener
at kafka.javaapi.consumer.ZookeeperConsumerConnector.<init>(ZookeeperConsumerConnector.scala:64)
at kafka.javaapi.consumer.ZookeeperConsumerConnector.<init>(ZookeeperConsumerConnector.scala:66)
at kafka.consumer.Consumer$.createJavaConsumerConnector(ConsumerConnector.scala:100)
at kafka.consumer.Consumer.createJavaConsumerConnector(ConsumerConnector.scala)
At this point I got the WTF kind of feeling. So I added another dependency:
<dependency>
<groupId>com.101tec</groupId>
<artifactId>zkclient</artifactId>
<version>0.3</version>
</dependency>
But this exposed yet another runtime exception:
Exception in thread "main" java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.simontuffs.onejar.Boot.run(Boot.java:340)
at com.simontuffs.onejar.Boot.main(Boot.java:166)
Caused by: java.lang.NoClassDefFoundError: com/yammer/metrics/core/Gauge
at kafka.consumer.ZookeeperConsumerConnector.createFetcher(ZookeeperConsumerConnector.scala:146)
at kafka.consumer.ZookeeperConsumerConnector.<init>(ZookeeperConsumerConnector.scala:113)
at kafka.javaapi.consumer.ZookeeperConsumerConnector.<init>(ZookeeperConsumerConnector.scala:64)
at kafka.javaapi.consumer.ZookeeperConsumerConnector.<init>(ZookeeperConsumerConnector.scala:66)
at kafka.consumer.Consumer$.createJavaConsumerConnector(ConsumerConnector.scala:100)
at kafka.consumer.Consumer.createJavaConsumerConnector(ConsumerConnector.scala)
I am hoping to get this baby example up and running, but maybe this is the price to pay for using beta products? Maybe I should switch to Apache ActiveMQ. But that sounds less fun. Am I missing something?
The problem is that the Kafka beta was built in such a way that the pom generated with the jar isn't valid, so Maven could not recognize and parse it properly and thus could not fetch the transitive dependencies. We managed to mitigate this problem by listing all of the dependencies from that pom (Scala, zkclient, etc.) in our own pom definition. We're waiting for the next beta builds of Kafka, in which the problem will be fixed.
The full dependency list is below. Note that you have to match the scala-library version to the suffix of your Kafka artifact; for example, kafka_2.9.2 pairs with scala-library 2.9.2 rather than the 2.8.0 shown here.
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.8.0</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.15</version>
<exclusions>
<exclusion>
<groupId>com.sun.jmx</groupId>
<artifactId>jmxri</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jdmk</groupId>
<artifactId>jmxtools</artifactId>
</exclusion>
<exclusion>
<groupId>javax.jms</groupId>
<artifactId>jms</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>net.sf.jopt-simple</groupId>
<artifactId>jopt-simple</artifactId>
<version>3.2</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.6.4</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>2.8.0</version>
</dependency>
<dependency>
<groupId>com.101tec</groupId>
<artifactId>zkclient</artifactId>
<version>0.3</version>
</dependency>
<dependency>
<groupId>com.yammer.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>2.2.0</version>
</dependency>
<dependency>
<groupId>com.yammer.metrics</groupId>
<artifactId>metrics-annotation</artifactId>
<version>2.2.0</version>
</dependency>
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<version>3.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest</artifactId>
<version>1.2</version>
<scope>test</scope>
</dependency>
As for the
Maybe I should switch to Apache Active MQ. But that sounds less fun.
Am I missing something?
Well, don't forget that this is a beta release. Some bad things do happen, indeed, but we're currently running Kafka 0.7 without any trouble.
I found this configuration of dependencies to be functional:
<dependencies>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>3.2.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>3.2.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.9.2</artifactId>
<version>0.8.0-beta1</version>
</dependency>
<dependency>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
<version>1</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.9.2</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<dependency>
<groupId>com.101tec</groupId>
<artifactId>zkclient</artifactId>
<version>0.3</version>
</dependency>
<dependency>
<groupId>com.yammer.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>2.2.0</version>
</dependency>
</dependencies>
This seems to work:
$ git clone https://github.com/buildlackey/cep
$ cd cep/kafka-0.8.x
$ mvn package
$ mvn exec:java -Dexec.mainClass=TestKafkaProducer
(via "where can I find maven repository for kafka?")