bazel - netty_tcnative errors - java

I'm trying to run a Java application powered by gRPC, but I need netty_tcnative to work. I'm following this example: https://github.com/grpc/grpc-java/blob/master/examples/example-tls/BUILD.bazel. So far, I haven't been able to make it work. I get errors like this:
INFO: Transport failed
java.lang.NoClassDefFoundError: io/opencensus/contrib/grpc/metrics/RpcMeasureConstants
at io.grpc.internal.DeprecatedCensusConstants.<clinit>(DeprecatedCensusConstants.java:30)
at io.grpc.internal.CensusStatsModule$ServerTracerFactory.newServerStreamTracer(CensusStatsModule.java:674)
at io.grpc.internal.StatsTraceContext.newServerContext(StatsTraceContext.java:80)
at io.grpc.netty.shaded.io.grpc.netty.NettyServerHandler.onHeadersRead(NettyServerHandler.java:425)
at io.grpc.netty.shaded.io.grpc.netty.NettyServerHandler.access$900(NettyServerHandler.java:101)
at io.grpc.netty.shaded.io.grpc.netty.NettyServerHandler$FrameListener.onHeadersRead(NettyServerHandler.java:801)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$FrameReadListener.onHeadersRead(DefaultHttp2ConnectionDecoder.java:373)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$FrameReadListener.onHeadersRead(DefaultHttp2ConnectionDecoder.java:321)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$PrefaceFrameListener.onHeadersRead(DefaultHttp2ConnectionDecoder.java:660)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2InboundFrameLogger$1.onHeadersRead(Http2InboundFrameLogger.java:56)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2FrameReader$2.processFragment(DefaultHttp2FrameReader.java:483)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2FrameReader.readHeadersFrame(DefaultHttp2FrameReader.java:491)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2FrameReader.processPayloadState(DefaultHttp2FrameReader.java:254)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2FrameReader.readFrame(DefaultHttp2FrameReader.java:160)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2InboundFrameLogger.readFrame(Http2InboundFrameLogger.java:41)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder.decodeFrame(DefaultHttp2ConnectionDecoder.java:174)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2ConnectionHandler$FrameDecoder.decode(Http2ConnectionHandler.java:378)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2ConnectionHandler$PrefaceDecoder.decode(Http2ConnectionHandler.java:242)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2ConnectionHandler.decode(Http2ConnectionHandler.java:438)
at io.grpc.netty.shaded.io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:505)
at io.grpc.netty.shaded.io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:444)
at io.grpc.netty.shaded.io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:283)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:352)
at io.grpc.netty.shaded.io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1421)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360)
at io.grpc.netty.shaded.io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:930)
at io.grpc.netty.shaded.io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:794)
at io.grpc.netty.shaded.io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:424)
at io.grpc.netty.shaded.io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:326)
at io.grpc.netty.shaded.io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:918)
at io.grpc.netty.shaded.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at io.grpc.netty.shaded.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.ClassNotFoundException: io.opencensus.contrib.grpc.metrics.RpcMeasureConstants
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 36 more
Mar 19, 2020 2:17:32 PM io.grpc.netty.shaded.io.grpc.netty.NettyServerTransport notifyTerminated
INFO: Transport failed
java.lang.NoClassDefFoundError: Could not initialize class io.grpc.internal.DeprecatedCensusConstants
at io.grpc.internal.CensusStatsModule$ServerTracerFactory.newServerStreamTracer(CensusStatsModule.java:674)
at io.grpc.internal.StatsTraceContext.newServerContext(StatsTraceContext.java:80)
at io.grpc.netty.shaded.io.grpc.netty.NettyServerHandler.onHeadersRead(NettyServerHandler.java:425)
at io.grpc.netty.shaded.io.grpc.netty.NettyServerHandler.access$900(NettyServerHandler.java:101)
at io.grpc.netty.shaded.io.grpc.netty.NettyServerHandler$FrameListener.onHeadersRead(NettyServerHandler.java:801)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$FrameReadListener.onHeadersRead(DefaultHttp2ConnectionDecoder.java:373)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$FrameReadListener.onHeadersRead(DefaultHttp2ConnectionDecoder.java:321)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$PrefaceFrameListener.onHeadersRead(DefaultHttp2ConnectionDecoder.java:660)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2InboundFrameLogger$1.onHeadersRead(Http2InboundFrameLogger.java:56)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2FrameReader$2.processFragment(DefaultHttp2FrameReader.java:483)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2FrameReader.readHeadersFrame(DefaultHttp2FrameReader.java:491)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2FrameReader.processPayloadState(DefaultHttp2FrameReader.java:254)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2FrameReader.readFrame(DefaultHttp2FrameReader.java:160)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2InboundFrameLogger.readFrame(Http2InboundFrameLogger.java:41)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder.decodeFrame(DefaultHttp2ConnectionDecoder.java:174)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2ConnectionHandler$FrameDecoder.decode(Http2ConnectionHandler.java:378)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2ConnectionHandler$PrefaceDecoder.decode(Http2ConnectionHandler.java:242)
at io.grpc.netty.shaded.io.netty.handler.codec.http2.Http2ConnectionHandler.decode(Http2ConnectionHandler.java:438)
at io.grpc.netty.shaded.io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:505)
at io.grpc.netty.shaded.io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:444)
at io.grpc.netty.shaded.io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:283)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:352)
at io.grpc.netty.shaded.io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1421)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374)
at io.grpc.netty.shaded.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360)
at io.grpc.netty.shaded.io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:930)
at io.grpc.netty.shaded.io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:794)
at io.grpc.netty.shaded.io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:424)
at io.grpc.netty.shaded.io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:326)
at io.grpc.netty.shaded.io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:918)
at io.grpc.netty.shaded.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at io.grpc.netty.shaded.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
My BUILD file looks like this:
load("@rules_proto//proto:defs.bzl", "proto_library")
load("@io_grpc_grpc_java//:java_grpc_library.bzl", "java_grpc_library")
load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar")
load("@io_bazel_rules_docker//java:image.bzl", "java_image")
load("@io_bazel_rules_docker//container:container.bzl", "container_bundle", "container_image", "container_push")
load("@io_bazel_rules_docker//contrib:push-all.bzl", "docker_push")

package(default_visibility = ["//visibility:public"])

proto_library(
    name = "account_proto",
    srcs = glob(["src/main/proto/account.proto"]),
    deps = ["@googleapi//google/api:annotations_proto"],
)

java_proto_library(
    name = "account_proto_java",
    deps = [":account_proto"],
)

java_grpc_library(
    name = "account_java_grpc",
    srcs = [":account_proto"],
    deps = [":account_proto_java"],
)

pkg_tar(
    name = "cred_tar",
    srcs = glob(["config/revoked-microservices-service_key.json"]),
    package_dir = "src/main/app",
)

java_image(
    name = "account_service",
    srcs = glob(["src/main/java/**/*.java"]),
    resources = glob(["src/main/resources/**"]),
    runtime_deps = [
        "@maven//:io_netty_netty_tcnative_boringssl_static",
    ],
    deps = [
        "@maven//:com_google_cloud_google_cloud_pubsub",
        "@maven//:com_google_cloud_google_cloud_storage",
        "@maven//:io_grpc_grpc_netty_shaded",
        "@maven//:io_grpc_grpc_protobuf",
        "@maven//:io_grpc_grpc_stub",
        "@maven//:mysql_mysql_connector_java",
        "@maven//:com_google_cloud_sql_mysql_socket_factory_connector_j_8",
        "@maven//:com_zaxxer_HikariCP",
        "@maven//:com_google_inject_guice",
        "@maven//:jaxen_jaxen",
        "@maven//:org_dom4j_dom4j",
        "@maven//:org_slf4j_slf4j_simple",
        "@maven//:org_slf4j_slf4j_api",
        "@maven//:log4j_log4j",
        "@maven//:com_google_code_gson_gson",
        "@maven//:junit_junit",
        "@maven//:com_sun_xml_ws_jaxws_rt",
        "@maven//:javax_xml_ws_jaxws_api",
        "@maven//:com_google_guava_guava",
        "@maven//:io_grpc_grpc_api",
        "@maven//:com_google_protobuf_protobuf_java",
        "@maven//:com_google_protobuf_protobuf_java_util",
        "@maven//:com_google_api_grpc_proto_google_common_protos",
        "@io_grpc_grpc_java//api",
        "@io_grpc_grpc_java//protobuf",
        "@io_grpc_grpc_java//stub",
        "@io_grpc_grpc_java//netty",
        "@maven//:io_netty_netty_handler",
        "@com_google_protobuf//:protobuf_java",
        "@com_google_protobuf//:protobuf_java_util",
        ":account_proto_java",
        ":account_java_grpc",
    ],
    main_class = "com.revoked.account.Application",
)

container_image(
    name = "account_service_image",
    base = ":account_service",
    tars = [":cred_tar"],
    ports = ["8080"],
    workdir = "/usr/main/app",
    env = {
        "LANG": "en_US.UTF-8",
        "TZ": "America/New_York",
    },
)

container_bundle(
    name = "bundle_image",
    images = {
        "revoked/account-microservice:bazel-test-1.0.2": ":account_service_image",
    },
)

container_push(
    name = "push_image",
    image = ":account_service_image",
    format = "Docker",
    registry = "index.docker.io",
    repository = "revoked/account-microservice",
    tag = "bazel-test-1.0.2",
)
I'm not sure what I'm doing wrong. Any help would be great!

grpc-java's grpc-netty-shaded target does not actually shade, so it appears your @maven//:io_grpc_* targets are actually pulling from Maven Central. You should be using the grpc-java repository targets instead.
In order to have the Maven dependencies and Bazel dependencies mix appropriately, use maven_install's override_targets within your WORKSPACE, as done in the example.
maven_install(
    artifacts = IO_GRPC_GRPC_JAVA_ARTIFACTS,
    generate_compat_repositories = True,
    override_targets = IO_GRPC_GRPC_JAVA_OVERRIDE_TARGETS,
    repositories = [
        "https://repo.maven.apache.org/maven2/",
    ],
)
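For reference, here is a fuller WORKSPACE sketch based on the linked grpc-java example (assuming the grpc-java repository is named io_grpc_grpc_java and maven_install comes from rules_jvm_external; the guava line is only a placeholder for your own artifacts). With override_targets in place, labels such as @maven//:io_grpc_grpc_netty_shaded are redirected to the grpc-java repository targets, so the deps in your BUILD file can stay as they are:
load(
    "@io_grpc_grpc_java//:repositories.bzl",
    "IO_GRPC_GRPC_JAVA_ARTIFACTS",
    "IO_GRPC_GRPC_JAVA_OVERRIDE_TARGETS",
    "grpc_java_repositories",
)
load("@rules_jvm_external//:defs.bzl", "maven_install")

maven_install(
    # Your own artifacts plus grpc-java's pinned dependency list.
    artifacts = [
        "com.google.guava:guava:28.1-jre",  # placeholder for your own artifacts
    ] + IO_GRPC_GRPC_JAVA_ARTIFACTS,
    generate_compat_repositories = True,
    # Redirect @maven//:io_grpc_* labels to @io_grpc_grpc_java targets.
    override_targets = IO_GRPC_GRPC_JAVA_OVERRIDE_TARGETS,
    repositories = ["https://repo.maven.apache.org/maven2/"],
)

load("@maven//:compat.bzl", "compat_repositories")

compat_repositories()

grpc_java_repositories()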

Related

Changing Java version in Bazel

I am using Bazel as the build tool for my Java project. I have JDK 11 installed on my Mac, but Bazel uses Java 8 to build the binaries. Does anyone know how I could change this?
BUILD.bazel
load(
    "@bazel_tools//tools/jdk:default_java_toolchain.bzl",
    "default_java_toolchain",
)

java_binary(
    name = "JavaBinary",
    srcs = ["JavaBinary.java"],
    main_class = "JavaBinary",
)

default_java_toolchain(
    name = "default_toolchain",
    visibility = ["//visibility:public"],
)
JavaBinary.java
public class JavaBinary {
    public static void main(String[] args) {
        System.out.println("Successfully executed JavaBinary!");
        System.out.println("Version: " + System.getProperty("java.version"));
    }
}
WORKSPACE.bazel
load("#bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "rules_java",
sha256 = "220b87d8cfabd22d1c6d8e3cdb4249abd4c93dcc152e0667db061fb1b957ee68",
url = "https://github.com/bazelbuild/rules_java/releases/download/0.1.1/rules_java-0.1.1.tar.gz",
)
load("#rules_java//java:repositories.bzl", "rules_java_dependencies", "rules_java_toolchains")
rules_java_dependencies()
rules_java_toolchains()
Run it this way:
bazel run :JavaBinary \
    --java_toolchain=:default_toolchain \
    --javabase=@bazel_tools//tools/jdk:remote_jdk11
You can also create a .bazelrc file and then just execute bazel run :JavaBinary:
.bazelrc
build --java_toolchain=:default_toolchain
build --javabase=@bazel_tools//tools/jdk:remote_jdk11
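With the .bazelrc in place, the program's own output is a quick way to verify which JDK was picked up (the version string below is illustrative; the exact value depends on the remote JDK build):
$ bazel run :JavaBinary
Successfully executed JavaBinary!
Version: 11.0.2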

ClassNotFoundException while trying to use slick codegen

I am trying to use a custom codegen for the purpose of acquiring DateTime types from MySQL instead of Timestamp. I just couldn't make the sbt task run with the custom code generator.
The class is located at /project-root/app/com/my/name.
val conf = ConfigFactory.parseFile(new File("conf/application.conf")).resolve()

slick <<= slickCodeGenTask

lazy val slick = TaskKey[Seq[File]]("gen-tables")

lazy val slickCodeGenTask = (sourceManaged, dependencyClasspath in Compile, runner in Compile, streams) map { (dir, cp, r, s) =>
  val outputDir = (dir / "slick").getPath
  val url = conf.getString("slick.dbs.default.db.url")
  val jdbcDriver = conf.getString("slick.dbs.default.db.driver")
  val slickDriver = conf.getString("slick.dbs.default.driver").dropRight(1)
  val pkg = "com.my.name"
  val user = conf.getString("slick.dbs.default.db.user")
  val password = conf.getString("slick.dbs.default.db.password")
  toError(r.run(s"$pkg.CustomCodeGenerator", cp.files, Array(slickDriver, jdbcDriver, url, outputDir, pkg, user, password), s.log))
  val fname = outputDir + s"/$pkg/Tables.scala"
  Seq(file(fname))
}
It always gives the same exception below when I try to run sbt gen-tables:
java.lang.ClassNotFoundException: com.my.name.CustomCodeGenerator
at java.lang.ClassLoader.findClass(ClassLoader.java:530)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sbt.classpath.ClasspathFilter.loadClass(ClassLoaders.scala:59)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at sbt.Run.getMainMethod(Run.scala:72)
at sbt.Run.run0(Run.scala:60)
at sbt.Run.sbt$Run$$execute$1(Run.scala:51)
at sbt.Run$$anonfun$run$1.apply$mcV$sp(Run.scala:55)
at sbt.Run$$anonfun$run$1.apply(Run.scala:55)
at sbt.Run$$anonfun$run$1.apply(Run.scala:55)
at sbt.Logger$$anon$4.apply(Logger.scala:84)
at sbt.TrapExit$App.run(TrapExit.scala:248)
at java.lang.Thread.run(Thread.java:745)
When I try some built-in Java classes or the default Slick codegen class just to experiment, it finds the class.
I tried changing the order of this task in build.sbt, but that didn't solve it.
Instead of
dependencyClasspath in Compile
use fullClasspath in Compile
See https://www.scala-sbt.org/1.x/docs/Howto-Classpaths.html
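This works because fullClasspath includes the project's own compiled classes in addition to its dependencies, whereas dependencyClasspath contains only the dependencies, so com.my.name.CustomCodeGenerator was never on the classpath handed to the runner. Applied to the task above, only the classpath key changes (a sketch; the body is unchanged from the question):
lazy val slickCodeGenTask = (sourceManaged, fullClasspath in Compile, runner in Compile, streams) map { (dir, cp, r, s) =>
  val outputDir = (dir / "slick").getPath
  val url = conf.getString("slick.dbs.default.db.url")
  val jdbcDriver = conf.getString("slick.dbs.default.db.driver")
  val slickDriver = conf.getString("slick.dbs.default.driver").dropRight(1)
  val pkg = "com.my.name"
  val user = conf.getString("slick.dbs.default.db.user")
  val password = conf.getString("slick.dbs.default.db.password")
  // cp.files now includes the project's compiled classes, so the custom generator resolves
  toError(r.run(s"$pkg.CustomCodeGenerator", cp.files, Array(slickDriver, jdbcDriver, url, outputDir, pkg, user, password), s.log))
  val fname = outputDir + s"/$pkg/Tables.scala"
  Seq(file(fname))
}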

I am getting an error when using flume

I am using the configuration below:
ClouderaTwitterAgent.sources = Twitter
ClouderaTwitterAgent.channels = MemChannel
ClouderaTwitterAgent.sinks = HDFS
ClouderaTwitterAgent.sources.Twitter.type = com.cloudera.flume.source.TwitterSource
ClouderaTwitterAgent.sources.Twitter.channels = MemChannel
ClouderaTwitterAgent.sources.Twitter.consumerKey = xxxxxxxxxxxxx
ClouderaTwitterAgent.sources.Twitter.consumerSecret = xxxxxxxxxxxxx
ClouderaTwitterAgent.sources.Twitter.accessToken = xxxxxxxxxxxxxxxx
ClouderaTwitterAgent.sources.Twitter.accessTokenSecret = xxxxxxxxxxxxxx
ClouderaTwitterAgent.sources.Twitter.keywords = Sully
ClouderaTwitterAgent.sinks.HDFS.channel = MemChannel
ClouderaTwitterAgent.sinks.HDFS.type = hdfs
ClouderaTwitterAgent.sinks.HDFS.hdfs.path = hdfs://localhost:9000/user/tweets
ClouderaTwitterAgent.sinks.HDFS.hdfs.fileType = DataStream
ClouderaTwitterAgent.sinks.HDFS.hdfs.writeFormat = Text
ClouderaTwitterAgent.sinks.HDFS.hdfs.batchSize = 1000
ClouderaTwitterAgent.sinks.HDFS.hdfs.rollSize = 0
ClouderaTwitterAgent.sinks.HDFS.hdfs.rollCount = 10000
ClouderaTwitterAgent.channels.MemChannel.type = memory
ClouderaTwitterAgent.channels.MemChannel.capacity = 10000
ClouderaTwitterAgent.channels.MemChannel.transactionCapacity = 100
and this is the command that I am using to run Flume:
`bin/flume-ng agent --conf ./conf/ -f conf/flume-cloudera.conf -Dflume.root.logger=DEBUG,console -n ClouderaTwitterAgent`
and this is the error that I am getting:
2016-09-20 14:53:14,245 (Twitter4J Async Dispatcher[0]) [DEBUG - com.cloudera.flume.source.TwitterSource$1.onStatus(TwitterSource.java:121)] tweet arrived
2016-09-20 14:53:16,073 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.hdfs.BucketWriter.open(BucketWriter.java:234)] Creating hdfs://localhost:9000/user/tweets/FlumeData.1474363316543.tmp
2016-09-20 14:53:16,113 (SinkRunner-PollingRunner-DefaultSinkProcessor) [ERROR - org.apache.flume.sink.hdfs.HDFSEventSink.process(HDFSEventSink.java:459)] process failed
java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:131)
at org.apache.hadoop.security.Groups.<init>(Groups.java:64)
at org.apache.hadoop.security.Groups.getUserToGroupsMappingService(Groups.java:240)
at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:255)
at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:232)
at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:718)
at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:703)
at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:605)
at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2554)
at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2546)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2412)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:368)
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:296)
at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:243)
at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:235)
at org.apache.flume.sink.hdfs.BucketWriter$9$1.run(BucketWriter.java:679)
at org.apache.flume.auth.SimpleAuthenticator.execute(SimpleAuthenticator.java:50)
at org.apache.flume.sink.hdfs.BucketWriter$9.call(BucketWriter.java:676)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)
... 21 more
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.security.JniBasedUnixGroupsMapping
at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.<init>(JniBasedUnixGroupsMappingWithFallback.java:38)
... 26 more
2016-09-20 14:53:16,121 (SinkRunner-PollingRunner-DefaultSinkProcessor) [ERROR-org.apache.flume.SinkRunner$PollingRunner.run(SinkRunner.java:160)] Unable to deliver event. Exception follows.
Can anyone please help me? I am new to this; I got this configuration from the internet and followed every instruction.
I even searched for a solution to this problem.
I will list out what I have done:
1. I checked my system clock.
2. Removed the HDFS folder and then made a new folder.
3. Formatted the namenode.
4. Restarted the agent several times.

How can I run DataNucleus Bytecode Enhancer from SBT?

I've put together a proof of concept which aims to provide a skeleton SBT multimodule project which utilizes DataNucleus JDO Enhancer with mixed Java and Scala sources.
The difficulty appears when I try to enhance persistence classes from SBT. Apparently, I'm not passing the correct classpath when calling Fork.java.fork(...) from SBT.
See also this question:
How can SBT generate metamodel classes from model classes using DataNucleus?
Exception in thread "main" java.lang.NoClassDefFoundError: Could not initialize class org.datanucleus.util.Localiser
at org.datanucleus.metadata.MetaDataManagerImpl.loadPersistenceUnit(MetaDataManagerImpl.java:1104)
at org.datanucleus.enhancer.DataNucleusEnhancer.getFileMetadataForInput(DataNucleusEnhancer.java:768)
at org.datanucleus.enhancer.DataNucleusEnhancer.enhance(DataNucleusEnhancer.java:488)
at org.datanucleus.api.jdo.JDOEnhancer.enhance(JDOEnhancer.java:125)
at javax.jdo.Enhancer.run(Enhancer.java:196)
at javax.jdo.Enhancer.main(Enhancer.java:130)
[info] Compiling 2 Java sources to /home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/klasses...
java.lang.IllegalStateException: errno = 1
at $54321831a5683ffa07b5$.runner(build.sbt:230)
at $54321831a5683ffa07b5$$anonfun$model$7.apply(build.sbt:259)
at $54321831a5683ffa07b5$$anonfun$model$7.apply(build.sbt:258)
at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:40)
at sbt.std.Transform$$anon$4.work(System.scala:63)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
at sbt.Execute.work(Execute.scala:235)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:226)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:226)
at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
For the sake of completeness and information, below you can see a java command line generated by SBT, which can be executed by hand in a separate window. It just works fine.
$ java -cp /home/rgomes/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.6.jar:/home/rgomes/.ivy2/cache/com.google.code.gson/gson/jars/gson-2.3.1.jar:/home/rgomes/.ivy2/cache/javax.jdo/jdo-api/jars/jdo-api-3.0.jar:/home/rgomes/.ivy2/cache/javax.transaction/transaction-api/jars/transaction-api-1.1.jar:/home/rgomes/.ivy2/cache/org.datanucleus/datanucleus-core/jars/datanucleus-core-4.0.4.jar:/home/rgomes/.ivy2/cache/org.datanucleus/datanucleus-api-jdo/jars/datanucleus-api-jdo-4.0.4.jar:/home/rgomes/.ivy2/cache/org.datanucleus/datanucleus-jdo-query/jars/datanucleus-jdo-query-4.0.4.jar:/home/rgomes/.ivy2/cache/org.datanucleus/datanucleus-rdbms/jars/datanucleus-rdbms-4.0.4.jar:/home/rgomes/.ivy2/cache/com.h2database/h2/jars/h2-1.4.185.jar:/home/rgomes/.ivy2/cache/org.postgresql/postgresql/jars/postgresql-9.4-1200-jdbc41.jar:/home/rgomes/.ivy2/cache/com.github.dblock.waffle/waffle-jna/jars/waffle-jna-1.7.jar:/home/rgomes/.ivy2/cache/net.java.dev.jna/jna/jars/jna-4.1.0.jar:/home/rgomes/.ivy2/cache/net.java.dev.jna/jna-platform/jars/jna-platform-4.1.0.jar:/home/rgomes/.ivy2/cache/org.slf4j/slf4j-simple/jars/slf4j-simple-1.7.7.jar:/home/rgomes/.ivy2/cache/org.slf4j/slf4j-api/jars/slf4j-api-1.7.7.jar:/home/rgomes/workspace/poc-scala-datanucleus/model/src/main/resources:/home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/klasses javax.jdo.Enhancer -v -pu persistence-h2 -d /home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/classes
May 13, 2015 3:30:07 PM org.datanucleus.enhancer.ClassEnhancerImpl save
INFO: Writing class file "/home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/classes/model/AbstractModel.class" with enhanced definition
May 13, 2015 3:30:07 PM org.datanucleus.enhancer.DataNucleusEnhancer addMessage
INFO: ENHANCED (Persistable) : model.AbstractModel
May 13, 2015 3:30:07 PM org.datanucleus.enhancer.ClassEnhancerImpl save
INFO: Writing class file "/home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/classes/model/Identifier.class" with enhanced definition
May 13, 2015 3:30:07 PM org.datanucleus.enhancer.DataNucleusEnhancer addMessage
INFO: ENHANCED (Persistable) : model.Identifier
May 13, 2015 3:30:07 PM org.datanucleus.enhancer.DataNucleusEnhancer addMessage
INFO: DataNucleus Enhancer completed with success for 2 classes. Timings : input=112 ms, enhance=102 ms, total=214 ms. Consult the log for full details
Enhancer Processing -v.
Enhancer adding Persistence Unit persistence-h2.
Enhancer processing output directory /home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/classes.
Enhancer found JDOEnhancer of class org.datanucleus.api.jdo.JDOEnhancer.
Enhancer property key:VendorName value:DataNucleus.
Enhancer property key:VersionNumber value:4.0.4.
Enhancer property key:API value:JDO.
Enhancer enhanced 2 classes.
Below you can see some debugging information which is passed to Fork.java.fork(...):
=============================================================
mainClass=javax.jdo.Enhancer
args=-v -pu persistence-h2 -d /home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/classes
javaHome=None
cwd=/home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/classes
runJVMOptions=
bootJars ---------------------------------------------
/home/rgomes/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.6.jar
/home/rgomes/.ivy2/cache/com.google.code.gson/gson/jars/gson-2.3.1.jar
/home/rgomes/.ivy2/cache/javax.jdo/jdo-api/jars/jdo-api-3.0.jar
/home/rgomes/.ivy2/cache/javax.transaction/transaction-api/jars/transaction-api-1.1.jar
/home/rgomes/.ivy2/cache/org.datanucleus/datanucleus-core/jars/datanucleus-core-4.0.4.jar
/home/rgomes/.ivy2/cache/org.datanucleus/datanucleus-api-jdo/jars/datanucleus-api-jdo-4.0.4.jar
/home/rgomes/.ivy2/cache/org.datanucleus/datanucleus-jdo-query/jars/datanucleus-jdo-query-4.0.4.jar
/home/rgomes/.ivy2/cache/org.datanucleus/datanucleus-rdbms/jars/datanucleus-rdbms-4.0.4.jar
/home/rgomes/.ivy2/cache/com.h2database/h2/jars/h2-1.4.185.jar
/home/rgomes/.ivy2/cache/org.postgresql/postgresql/jars/postgresql-9.4-1200-jdbc41.jar
/home/rgomes/.ivy2/cache/com.github.dblock.waffle/waffle-jna/jars/waffle-jna-1.7.jar
/home/rgomes/.ivy2/cache/net.java.dev.jna/jna/jars/jna-4.1.0.jar
/home/rgomes/.ivy2/cache/net.java.dev.jna/jna-platform/jars/jna-platform-4.1.0.jar
/home/rgomes/.ivy2/cache/org.slf4j/slf4j-simple/jars/slf4j-simple-1.7.7.jar
/home/rgomes/.ivy2/cache/org.slf4j/slf4j-api/jars/slf4j-api-1.7.7.jar
/home/rgomes/workspace/poc-scala-datanucleus/model/src/main/resources
/home/rgomes/workspace/poc-scala-datanucleus/model/target/scala-2.11/klasses
envVars ----------------------------------------------
=============================================================
The project is available on GitHub for your convenience at
https://github.com/frgomes/poc-scala-datanucleus
Just download it and type
./sbt compile
Any help is immensely appreciated. Thanks!
You can either use java.lang.ProcessBuilder or sbt.Fork.
See below a generic javaRunner you can add to your build.sbt which employs java.lang.ProcessBuilder.
See also a generic sbtRunner you can add to your build.sbt which employs sbt.Fork. Thanks to @dwijnand for providing insightful information for making sbtRunner work as expected.
def javaRunner(mainClass: String,
               args: Seq[String],
               classpath: Seq[File],
               cwd: File,
               javaHome: Option[File] = None,
               runJVMOptions: Seq[String] = Nil,
               envVars: Map[String, String] = Map.empty,
               connectInput: Boolean = false,
               outputStrategy: Option[OutputStrategy] = Some(StdoutOutput)): Seq[File] = {
  val java_ : String = javaHome.fold("") { p => p.absolutePath + "/bin/" } + "java"
  val jvm_ : Seq[String] = runJVMOptions.map(p => p.toString)
  val cp_ : Seq[String] = classpath.map(p => p.absolutePath)
  val env_ = envVars.map({ case (k, v) => s"${k}=${v}" })
  val xcmd_ : Seq[String] = Seq(java_) ++ jvm_ ++ Seq("-cp", cp_.mkString(java.io.File.pathSeparator), mainClass) ++ args
  println("=============================================================")
  println(xcmd_.mkString(" "))
  println("=============================================================")
  println("")
  IO.createDirectory(cwd)
  import scala.collection.JavaConverters._
  val cmd = xcmd_.asJava
  val pb = new java.lang.ProcessBuilder(cmd)
  pb.directory(cwd)
  pb.inheritIO
  val process = pb.start()
  def cancel() = {
    println("Run canceled.")
    process.destroy()
    1
  }
  val errno = try process.waitFor catch { case e: InterruptedException => cancel() }
  if (errno == 0) {
    if (args.contains("-v")) cwd.list.foreach(f => println(f))
    cwd.listFiles
  } else {
    throw new IllegalStateException(s"errno = ${errno}")
  }
}

def sbtRunner(mainClass: String,
              args: Seq[String],
              classpath: Seq[File],
              cwd: File,
              javaHome: Option[File] = None,
              runJVMOptions: Seq[String] = Nil,
              envVars: Map[String, String] = Map.empty,
              connectInput: Boolean = false,
              outputStrategy: Option[OutputStrategy] = Some(StdoutOutput)): Seq[File] = {
  val args_ = args.map(p => p.toString)
  val java_ = javaHome.fold("None") { p => p.absolutePath }
  val cp_ = classpath.map(p => p.absolutePath)
  val jvm_ = runJVMOptions.map(p => p.toString) ++ Seq("-cp", cp_.mkString(java.io.File.pathSeparator))
  val env_ = envVars.map({ case (k, v) => s"${k}=${v}" })
  def dump: String =
    s"""
       |mainClass=${mainClass}
       |args=${args_.mkString(" ")}
       |javaHome=${java_}
       |cwd=${cwd.absolutePath}
       |runJVMOptions=${jvm_.mkString(" ")}
       |classpath --------------------------------------------
       |${cp_.mkString("\n")}
       |envVars ----------------------------------------------
       |${env_.mkString("\n")}
     """.stripMargin
  def cmd: String =
    s"""java ${jvm_.mkString(" ")} ${mainClass} ${args_.mkString(" ")}"""
  println("=============================================================")
  println(dump)
  println("=============================================================")
  println(cmd)
  println("=============================================================")
  println("")
  IO.createDirectory(cwd)
  val options =
    ForkOptions(
      javaHome = javaHome,
      outputStrategy = outputStrategy,
      bootJars = Seq.empty,
      workingDirectory = Option(cwd),
      runJVMOptions = jvm_,
      connectInput = connectInput,
      envVars = envVars)
  val process = new Fork("java", Option(mainClass)).fork(options, args)
  def cancel() = {
    println("Run canceled.")
    process.destroy()
    1
  }
  val errno = try process.exitValue() catch { case e: InterruptedException => cancel() }
  if (errno == 0) {
    if (args.contains("-v")) cwd.list.foreach(f => println(f))
    cwd.listFiles
  } else {
    throw new IllegalStateException(s"errno = ${errno}")
  }
}
Then you need to wire DataNucleus Enhancer as part of your build process. This is done via manipulateBytecode sub-task, as demonstrated below:
lazy val model =
  project.in(file("model"))
    // .settings(publishSettings:_*)
    .settings(librarySettings: _*)
    .settings(paranoidOptions: _*)
    .settings(otestFramework: _*)
    .settings(deps_tagging: _*)
    //-- .settings(deps_stream:_*)
    .settings(deps_database: _*)
    .settings(
      Seq(
        // This trick requires SBT 0.13.8
        manipulateBytecode in Compile := {
          val previous = (manipulateBytecode in Compile).value
          sbtRunner( // javaRunner also works!
            mainClass = "javax.jdo.Enhancer",
            args = Seq(
              "-v",
              "-pu", "persistence-h2",
              "-d", (classDirectory in Compile).value.absolutePath),
            classpath =
              (managedClasspath in Compile).value.files ++
                (unmanagedResourceDirectories in Compile).value :+
                (classDirectory in Compile).value,
            cwd = (classDirectory in Compile).value,
            javaHome = javaHome.value,
            envVars = (envVars in Compile).value
          )
          previous
        }
      ): _*)
    .dependsOn(util)
For a complete example, including a few JDO annotated persistence classes and some rudimentary test cases, please have a look at
http://github.com/frgomes/poc-scala-datanucleus
I think the issue is you're passing your dependency jars as boot jars, not as the classpath.
From your poc project, perhaps something like:
val jvm_ = runJVMOptions.map(p => p.toString) ++
  Seq("-cp", cp_ mkString java.io.File.pathSeparator)

Stanford NER Error: Loading distsim lexicon Failed

In my project I need to use NER annotation, so I used NERDemo.java.
It works fine when I create a new project with only this code, but when I add it to my project I keep getting errors. I have edited the path in my code to the specific location of the classifiers.
I added the JAR files.
This is the code:
String serializedClassifier = "/Users/ha/stanford-ner-2014-10-26/classifiers/english.all.3class.distsim.crf.ser.gz";
String serializedClassifier2 = "/Users/ha/stanford-ner-2014-10-26/classifiers/english.muc.7class.distsim.crf.ser.gz";
if (args.length > 0) {
    serializedClassifier = args[0];
}
NERClassifierCombiner classifier = new NERClassifierCombiner(false, false, serializedClassifier, serializedClassifier2);
String fileContents = IOUtils.slurpFile("/Users/ha/NetBeansProjects/StanfordPOSCode/src/stanfordposcode/input.txt");
List<List<CoreLabel>> out = classifier.classify(fileContents);
int i = 0;
for (List<CoreLabel> lcl : out) {
    i++;
    int j = 0;
    for (CoreLabel cl : lcl) {
        j++;
        System.out.printf("%d:%d: %s%n", i, j,
            cl.toShorterString("Text", "CharacterOffsetBegin", "CharacterOffsetEnd", "NamedEntityTag"));
    }
}
But I got this error:
run:
Loading classifier from /Users/ha/stanford-ner-2014-10-26/classifiers/english.all.3class.distsim.crf.ser.gz ... Loading distsim lexicon from /u/nlp/data/pos_tags_are_useless/egw4-reut.512.clusters ... java.lang.RuntimeException: java.io.FileNotFoundException: /u/nlp/data/pos_tags_are_useless/egw4-reut.512.clusters (No such file or directory)
at edu.stanford.nlp.objectbank.ReaderIteratorFactory$ReaderIterator.setNextObject(ReaderIteratorFactory.java:225)
at edu.stanford.nlp.objectbank.ReaderIteratorFactory$ReaderIterator.<init>(ReaderIteratorFactory.java:161)
at edu.stanford.nlp.objectbank.ReaderIteratorFactory.iterator(ReaderIteratorFactory.java:98)
at edu.stanford.nlp.objectbank.ObjectBank$OBIterator.<init>(ObjectBank.java:404)
at edu.stanford.nlp.objectbank.ObjectBank.iterator(ObjectBank.java:242)
at edu.stanford.nlp.ie.NERFeatureFactory.initLexicon(NERFeatureFactory.java:471)
at edu.stanford.nlp.ie.NERFeatureFactory.init(NERFeatureFactory.java:379)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.reinit(AbstractSequenceClassifier.java:171)
at edu.stanford.nlp.ie.crf.CRFClassifier.loadClassifier(CRFClassifier.java:2630)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1620)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1736)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1679)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1662)
at edu.stanford.nlp.ie.crf.CRFClassifier.getClassifier(CRFClassifier.java:2851)
at edu.stanford.nlp.ie.ClassifierCombiner.loadClassifierFromPath(ClassifierCombiner.java:189)
at edu.stanford.nlp.ie.ClassifierCombiner.loadClassifiers(ClassifierCombiner.java:173)
at edu.stanford.nlp.ie.ClassifierCombiner.<init>(ClassifierCombiner.java:125)
at edu.stanford.nlp.ie.NERClassifierCombiner.<init>(NERClassifierCombiner.java:52)
at stanfordposcode.MultipleNERs.main(MultipleNERs.java:24)
Caused by: java.io.FileNotFoundException: /u/nlp/data/pos_tags_are_useless/egw4-reut.512.clusters (No such file or directory)
at java.io.FileInputStream.open(Native Method)
at java.io.FileInputStream.<init>(FileInputStream.java:131)
at edu.stanford.nlp.io.EncodingFileReader.<init>(EncodingFileReader.java:78)
at edu.stanford.nlp.objectbank.ReaderIteratorFactory$ReaderIterator.setNextObject(ReaderIteratorFactory.java:192)
... 18 more
Loading classifier from /Users/ha/stanford-ner-2014-10-26/classifiers/english.all.3class.distsim.crf.ser.gz ... Exception in thread "main" java.io.FileNotFoundException
at edu.stanford.nlp.ie.ClassifierCombiner.loadClassifierFromPath(ClassifierCombiner.java:199)
at edu.stanford.nlp.ie.ClassifierCombiner.loadClassifiers(ClassifierCombiner.java:173)
at edu.stanford.nlp.ie.ClassifierCombiner.<init>(ClassifierCombiner.java:125)
at edu.stanford.nlp.ie.NERClassifierCombiner.<init>(NERClassifierCombiner.java:52)
at stanfordposcode.MultipleNERs.main(MultipleNERs.java:24)
Caused by: java.lang.ClassCastException: java.util.ArrayList cannot be cast to edu.stanford.nlp.classify.LinearClassifier
at edu.stanford.nlp.ie.ner.CMMClassifier.loadClassifier(CMMClassifier.java:1070)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1620)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1736)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1679)
at edu.stanford.nlp.ie.AbstractSequenceClassifier.loadClassifier(AbstractSequenceClassifier.java:1662)
at edu.stanford.nlp.ie.ner.CMMClassifier.getClassifier(CMMClassifier.java:1116)
at edu.stanford.nlp.ie.ClassifierCombiner.loadClassifierFromPath(ClassifierCombiner.java:195)
... 4 more
Java Result: 1
BUILD SUCCESSFUL (total time: 1 second)
You are mixing and matching the code from version 3.4 and the models from version 3.5. I suggest upgrading everything to the latest version.
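Concretely, that means making sure the NER jar on the project's classpath comes from the same download as the classifiers, e.g. the stanford-ner.jar that ships inside stanford-ner-2014-10-26, not a leftover 3.4 jar. A sketch (the jar name and the NetBeans output directory are assumptions; adjust to your layout):
java -cp /Users/ha/stanford-ner-2014-10-26/stanford-ner.jar:build/classes stanfordposcode.MultipleNERs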
