Spring Boot: load a properties file referenced inside a YAML file - java

I have an application.yml file that references a properties file.
When executing in my IDE (Eclipse) I have no problem, but when running the jar from the console (with java -jar) it doesn't load the properties file that is set in the YAML file.
Here is my application.yml:
apache:
  kafka:
    producer:
      properties: kafka-producer-${application.environment}.properties
    consumer:
      properties: kafka-consumer-${application.environment}.properties
and here is the .properties file:
#
# Apache Kafka Consumer Properties
##
bootstrap.servers=XXXX:9092
group.id=consumers
enable.auto.commit=true
auto.commit.interval.ms=1000
key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
value.deserializer=org.apache.kafka.common.serialization.StringDeserializer
session.timeout.ms=300000
request.timeout.ms=305000
I'm loading the properties file like this:
@Value("${apache.kafka.producer.properties}")
private String kafkaProducerProperties;

@Bean
public KafkaProducer<String, String> eventProducer() {
    try {
        Properties properties = new Properties();
        properties.load(this.context.getResource("classpath:" + this.kafkaProducerProperties).getInputStream());
        return new KafkaProducer<String, String>(properties);
    } catch (final IOException exception) {
        LOG.error("Error loading Kafka producer properties", exception);
    }
    return null;
}
Executing in the IDE, this.kafkaProducerProperties has the right value, while when executing the jar it is null.
What am I doing wrong, and why does it load correctly in the IDE but not when executing the jar?
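Before rewriting anything, it is worth checking whether the @Value placeholder resolves at all in the packaged run. A minimal diagnostic sketch, reusing the question's field names (the log line is illustrative):
Resource resource = this.context.getResource("classpath:" + this.kafkaProducerProperties);
LOG.info("Kafka producer properties file: {} (exists on classpath: {})",
        this.kafkaProducerProperties, resource.exists());
If kafkaProducerProperties logs as null, the problem is placeholder resolution (for example, ${application.environment} not being supplied to the java -jar run) rather than resource loading.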

Please try this code:
@Bean
public KafkaProducer<String, String> eventProducer() {
    try {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "XXXX:9092");
        properties.put("group.id", "consumers");
        properties.put("enable.auto.commit", "true");
        properties.put("auto.commit.interval.ms", "1000");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("session.timeout.ms", "300000");
        properties.put("request.timeout.ms", "305000");
        return new KafkaProducer<String, String>(properties);
    } catch (final KafkaException exception) {
        LOG.error("Error creating Kafka producer", exception);
    }
    // Avoid returning null; throw an unchecked exception instead
    throw new IllegalStateException("Error creating Kafka producer");
}
To avoid hardcoding, you can also do this:
1- Add this to your application.yml:
bootstrap.servers: XXXX
group.id: consumers
enable.auto.commit: true
auto.commit.interval.ms: 1000
key.deserializer: org.apache.kafka.common.serialization.StringDeserializer
value.deserializer: org.apache.kafka.common.serialization.StringDeserializer
session.timeout.ms: 300000
request.timeout.ms: 305000
2- Add this code:
@Value("${bootstrap.servers}")
private String bootstrapServers;
@Value("${group.id}")
private String groupId;
@Value("${enable.auto.commit}")
private String enableAutoCommit;
@Value("${auto.commit.interval.ms}")
private String autoCommit;
@Value("${key.deserializer}")
private String keyDeserializer;
@Value("${value.deserializer}")
private String valueDeserializer;
@Value("${session.timeout.ms}")
private String sessionTimeout;
@Value("${request.timeout.ms}")
private String requestTimeout;

@Bean
public KafkaProducer<String, String> eventProducer() {
    try {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", bootstrapServers + ":9092");
        properties.put("group.id", groupId);
        properties.put("enable.auto.commit", enableAutoCommit);
        properties.put("auto.commit.interval.ms", autoCommit);
        properties.put("key.deserializer", keyDeserializer);
        properties.put("value.deserializer", valueDeserializer);
        properties.put("session.timeout.ms", sessionTimeout);
        properties.put("request.timeout.ms", requestTimeout);
        return new KafkaProducer<String, String>(properties);
    } catch (final KafkaException exception) {
        LOG.error("Error creating Kafka producer", exception);
    }
    // Avoid returning null; throw an unchecked exception instead
    throw new IllegalStateException("Error creating Kafka producer");
}
Hope this helps.
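If the file-based approach is preferred over inlining the values, a minimal sketch that loads the same classpath resource through Spring's resource abstraction, which resolves identically in the IDE and inside a packaged jar (assuming the .properties files sit under src/main/resources so they end up on the classpath):
@Value("${apache.kafka.producer.properties}")
private String kafkaProducerProperties;

@Bean
public KafkaProducer<String, String> eventProducer() throws IOException {
    // PropertiesLoaderUtils is org.springframework.core.io.support.PropertiesLoaderUtils
    Properties properties = PropertiesLoaderUtils.loadProperties(new ClassPathResource(kafkaProducerProperties));
    return new KafkaProducer<>(properties);
}
Letting the IOException propagate makes the application fail fast at startup instead of handing Spring a null bean.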

Related

How to pass Kafka's --producer.config through Java

I'm using the command below to send records to a secure Kafka cluster:
bin/kafka-console-producer.sh --topic <My Kafka topic name> --bootstrap-server <My custom bootstrap server> --producer.config /Users/DY/SSL/ssl.properties
As you can see I have added the ssl.properties file's path to the --producer.config switch.
The ssl.properties file contains details about how to connect to the secure Kafka cluster; its contents are below:
security.protocol=SSL
ssl.truststore.location=<My custom value>
ssl.truststore.password=<My custom value>
ssl.key.password=<My custom value>
ssl.keystore.location=<My custom value>
ssl.keystore.password=<My custom value>
Now, I want to replicate this command with a Java producer.
The code that I've written is:
public class MyProducer {
    public static void main(String[] args) throws Exception {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", <My bootstrap server>);
        properties.put("key.serializer", StringSerializer.class);
        properties.put("value.serializer", StringSerializer.class);
        properties.put("producer.config", "/Users/DY/SSL/ssl.properties");
        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<String, String>(properties);
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(
                <My bootstrap server>, "Hello World from program");
        Future<RecordMetadata> future = kafkaProducer.send(
                producerRecord,
                (metadata, exception) -> {
                    if (exception != null) {
                        System.out.println("something went wrong");
                        exception.printStackTrace();
                    } else {
                        System.out.println("Successfully transmitted");
                    }
                });
        future.get();
        kafkaProducer.close();
    }
}
This way of passing it via properties.put("producer.config", "/Users/DY/SSL/ssl.properties");, however, does not seem to work. Could anybody let me know what an appropriate way to do this would be?
Rather than pointing at a file, you can set each property individually using the static client config constants, as below:
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
// for SSL Encryption
properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "<My custom value>");
properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "<My custom value>");
// for SSL Authentication
properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "<My custom value>");
properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "<My custom value>");
properties.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "<My custom value>");
The required imports are:
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SslConfigs;
You have to set each one as a discrete property in the producer Properties.
You could use Properties.load() with a FileInputStream or FileReader to load them from the file into your Properties object.
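A minimal sketch of that approach, reusing the path from the question (whatever keys the file defines are merged into the same Properties object the producer is built from):
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "<My bootstrap server>");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
// Load the SSL settings from the file into the same Properties object
try (FileInputStream in = new FileInputStream("/Users/DY/SSL/ssl.properties")) {
    properties.load(in);
}
KafkaProducer<String, String> producer = new KafkaProducer<>(properties);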

Spring Boot tests with Testcontainers' Kafka without DirtiesContext

My goal is to use Kafka Testcontainers with the Spring Boot context in tests without @DirtiesContext. The problem is that, without starting a container separately for each test class, I have no idea how to consume only the messages that were produced by a particular test class or method.
So I end up consuming messages that were not even part of the test class that is running.
One solution might be to purge the topic of messages. I have no idea how to do this; I've tried restarting the container, but then the next test was not able to connect to Kafka.
The second solution I had in mind is a consumer that is created at the beginning of the test method and somehow records messages from the latest offset while the other stuff in the test runs. I was able to do this with embedded Kafka, but I have no idea how to do it using Testcontainers.
Current configuration looks like this:
@TestConfiguration
public class KafkaContainerConfig {

    @Bean(initMethod = "start", destroyMethod = "stop")
    public KafkaContainer kafkaContainer() {
        return new KafkaContainer("5.0.3");
    }

    @Bean
    public KafkaAdmin kafkaAdmin(KafkaProperties kafkaProperties, KafkaContainer kafkaContainer) {
        kafkaProperties.setBootstrapServers(List.of(kafkaContainer.getBootstrapServers()));
        return new KafkaAdmin(kafkaProperties.buildAdminProperties());
    }
}
With an annotation that provides the above configuration:
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Import(KafkaContainerConfig.class)
@EnableAutoConfiguration(exclude = TestSupportBinderAutoConfiguration.class)
@TestPropertySource("classpath:/application-test.properties")
@DirtiesContext
public @interface IncludeKafkaTestContainer {
}
And in the test class itself, with multiple such configurations, it looks like:
@IncludeKafkaTestContainer
@IncludePostgresTestContainer
@SpringBootTest(webEnvironment = RANDOM_PORT)
class SomeTest {
    ...
}
Currently, the consumer in a test method is created this way:
KafkaConsumer<String, String> kafkaConsumer = createKafkaConsumer("topic_name");
ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
List<ConsumerRecord<String, String>> topicMsgs = Lists.newArrayList(consumerRecords.iterator());
And:
public static KafkaConsumer<String, String> createKafkaConsumer(String topicName) {
    Properties properties = new Properties();
    properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers());
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, "testGroup_" + topicName);
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
    kafkaConsumer.subscribe(List.of(topicName));
    return kafkaConsumer;
}
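One hedged way to get per-test isolation without restarting the container: give every consumer a unique group id and start from the latest offset, so records produced by earlier tests are never delivered. A sketch based on the helper above (java.util.UUID and java.time.Duration assumed imported; the initial poll nudges the group to join and resolve "latest" before the test produces anything):
public static KafkaConsumer<String, String> createIsolatedKafkaConsumer(String topicName) {
    Properties properties = new Properties();
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers());
    // A fresh group per call, so offsets from earlier tests do not apply
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, "testGroup_" + UUID.randomUUID());
    properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
    kafkaConsumer.subscribe(List.of(topicName));
    kafkaConsumer.poll(Duration.ZERO);
    return kafkaConsumer;
}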

Java Kafka consumer NoClassDefFoundError

I am learning Kafka using a test consumer & producer; however, I am facing the error below.
Kafka consumer program:
package kafka001;
import java.util.Arrays;
import java.util.Properties;
import java.util.Scanner;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.errors.WakeupException;
public class ConsumerApp {
    private static Scanner in;
    private static boolean stop = false;

    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            System.err.printf("Usage: %s <topicName> <groupId>%n", ConsumerApp.class.getName());
            System.exit(-1);
        }
        System.out.println(args[0] + args.length);
        in = new Scanner(System.in);
        String topicName = args[0];
        String groupId = args[1];
        ConsumerThread consumerRunnable = new ConsumerThread(topicName, groupId);
        consumerRunnable.start();
        //System.out.println("Here");
        String line = "";
        while (!line.equals("exit")) {
            line = in.next();
        }
        consumerRunnable.getKafkaConsumer().wakeup();
        System.out.println("Stopping consumer now.....");
        consumerRunnable.join();
    }

    private static class ConsumerThread extends Thread {
        private String topicName;
        private String groupId;
        private KafkaConsumer<String, String> kafkaConsumer;

        public ConsumerThread(String topicName, String groupId) {
            //System.out.println("inside ConsumerThread constructor");
            this.topicName = topicName;
            this.groupId = groupId;
        }

        public void run() {
            //System.out.println("inside run");
            // Set up Kafka consumer properties
            Properties configProperties = new Properties();
            configProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "aup7727s.unix.anz:9092");
            configProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            configProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            configProperties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
            configProperties.put(ConsumerConfig.CLIENT_ID_CONFIG, "simple");
            // Subscribe to the topic
            kafkaConsumer = new KafkaConsumer<String, String>(configProperties);
            kafkaConsumer.subscribe(Arrays.asList(topicName));
            // Get/process messages from the topic and print them to the console
            try {
                while (true) {
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(100);
                    for (ConsumerRecord<String, String> record : records)
                        System.out.println(record.value());
                }
            } catch (WakeupException ex) {
                System.out.println("Exception caught " + ex.getMessage());
            } finally {
                kafkaConsumer.close();
                System.out.println("After closing KafkaConsumer");
            }
        }

        public KafkaConsumer<String, String> getKafkaConsumer() {
            return this.kafkaConsumer;
        }
    }
}
When I compile the code, I notice the following class files:
ConsumerApp$ConsumerThread.class and
ConsumerApp.class
I've generated a jar file named ConsumerApp.jar through Eclipse, and when I run it on the Hadoop cluster, I get a NoClassDefFoundError as below:
java -cp ConsumerApp.jar kafka001/ConsumerApp console1 group1
or
hadoop jar ConsumerApp.jar console1 group1
Exception in thread "main" java.lang.NoClassDefFoundError: org.apache.kafka.common.errors.WakeupException
at kafka001.ConsumerApp.main(ConsumerApp.java:24)
Caused by: java.lang.ClassNotFoundException: org.apache.kafka.common.errors.WakeupException
at java.net.URLClassLoader.findClass(URLClassLoader.java:607)
at java.lang.ClassLoader.loadClassHelper(ClassLoader.java:846)
at java.lang.ClassLoader.loadClass(ClassLoader.java:825)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:325)
at java.lang.ClassLoader.loadClass(ClassLoader.java:805)
... 1 more
I am using Eclipse to compile, and Maven to build and generate the jar file. Line number 24 corresponds to the creation of the ConsumerThread instance.
I am unable to work out whether this is due to the ConsumerThread class name being saved incorrectly (the class file is generated as ConsumerApp$ConsumerThread.class instead of ConsumerThread.class), or whether something needs to be taken care of while generating the jar file.
Since I can't view the entire project, I would try this: right-click on the project -> go to Maven 2 tools -> click generate artifacts (check for updates). That should create any missing dependencies. Also make sure you check out other similar posts that may resolve your issue, like this.
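The stack trace itself says that org.apache.kafka.common.errors.WakeupException is not on the runtime classpath, i.e. the kafka-clients dependency is not packaged into ConsumerApp.jar. A hedged sketch of one common fix, bundling dependencies into the jar with the maven-shade-plugin (pom.xml excerpt; configuration kept minimal):
<!-- pom.xml excerpt: packages dependencies such as kafka-clients into the jar -->
<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-shade-plugin</artifactId>
    <executions>
        <execution>
            <phase>package</phase>
            <goals>
                <goal>shade</goal>
            </goals>
        </execution>
    </executions>
</plugin>
Alternatively, keep the thin jar and pass kafka-clients on the classpath explicitly, e.g. java -cp ConsumerApp.jar:kafka-clients-<version>.jar kafka001.ConsumerApp console1 group1.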

Error setting up config properties with Java/Selenium/TestNG/Maven

I tried my best but couldn't find complete instructions on how to configure a properties file with Maven and TestNG.
Here is what I did and the exception I got:
For the TestNG suite, I added:
content of the config file:
user=testuser
password=pswd
pom.xml:
<testResources>
    <testResource>
        <directory>src/test/resources</directory>
        <filtering>true</filtering>
    </testResource>
</testResources>
In code:
@BeforeTest
@Parameters(value = { "config-file" })
public void initFramework(String configfile) throws Exception
{
    InputStream stream = Config.class.getResourceAsStream("/config.properties");
    Properties properties = new Properties();
    try {
        properties.load(stream);
        String user = properties.getProperty("user");
        String password = properties.getProperty("password");
        System.out.println("\nGot User FirstName+LastName shows as: " + user + "\n" + password + "===========");
    } catch (IOException e) {
        e.printStackTrace();
        // You will have to take some action here...
    }
}
Here is what I got when compiling:
org.testng.TestNGException:
Parameter 'config-file' is required by @Configuration on method initFramework but has not been marked @Optional or defined
Question:
I think I got the options mixed up, but I really wanted a working way to read the parameter with Java/Selenium/TestNG/Maven.
Properties CONFIG = new Properties();
FileInputStream ip = new FileInputStream("C://config.properties");
CONFIG.load(ip);
// Now simply read through the property file:
String user = CONFIG.getProperty("user");
String password = CONFIG.getProperty("password");
// To write the property file:
CONFIG.setProperty("user", "newbie1");
CONFIG.setProperty("password", "secret123");
CONFIG.store(new FileOutputStream("C://config.properties"), null);

How to export database schema using Hibernate SchemaExport with BeanValidation constraints?

Please see my requirement: using SchemaExport to export a database schema that applies BeanValidation constraints (e.g., @Length(32) will create the DB constraint column(32)).
In Hibernate 4.1.x, I can use the hack code posted here: https://forum.hibernate.org/viewtopic.php?f=1&t=1024911&view=previous
but the Ejb3Configuration class required in the above hack code was removed in Hibernate 4.3.5.
So how can I export a database schema that applies BeanValidation constraints without using Ejb3Configuration?
Something like this should work:
PersistenceUnitDescriptorAdapter pu = new PersistenceUnitDescriptorAdapter() {
    @Override
    public List<String> getManagedClassNames() {
        return Arrays.asList( MyClass.class.getName(), ... );
    }
};
Map<Object, Object> settings = new HashMap<Object, Object>();
settings.put( "javax.persistence.schema-generation.scripts.action", "create" );
settings.put( "javax.persistence.schema-generation.scripts.create-target", "<path-to-export-file>" );
EntityManagerFactoryBuilderImpl factoryBuilder = new EntityManagerFactoryBuilderImpl( pu, settings );
factoryBuilder.generateSchema();
It relies on Hibernate internal classes, but so did your earlier solution. You could create an issue here - https://hibernate.onjira.com/browse/HHH - explaining your use case. Maybe a solution using a public API can be made available.
I found a temporary solution using the HibernateConfiguration built by EntityManagerFactoryBuilderImpl. It uses the JPA configuration to emit the schema script (with bean-validation constraints).
public final class JpaSchemaExporter
{
    private final DialectType dialect;
    private final Path outputPath;
    private final ParsedPersistenceXmlDescriptor pud;
    private final EntityManagerFactoryBuilderImpl factoryBuilder;

    public JpaSchemaExporter(String utilName, String packageName, Properties properties, DialectType dialect,
            Path outputPath) throws Exception
    {
        this.dialect = dialect;
        this.outputPath = outputPath;
        if (Files.exists(outputPath) && !Files.isDirectory(outputPath)) {
            throw new IllegalArgumentException(
                    "Given path already exists and is not a directory! The path: " + outputPath);
        }
        Files.createDirectories(outputPath);
        pud = new ParsedPersistenceXmlDescriptor(Resources.getResourceURL("META-INF"));
        pud.setName(utilName);
        pud.addClasses(Resources.getClasseNames(packageName));
        pud.addMappingFiles("META-INF/orm.xml");
        properties.setProperty("hibernate.dialect", dialect.getDialectClass());
        ValidatorFactory validatorFactory = Validation.buildDefaultValidatorFactory();
        factoryBuilder = new EntityManagerFactoryBuilderImpl( pud, properties );
        factoryBuilder.withValidatorFactory(validatorFactory).build().close(); // creates the HibernateConfiguration instance
        this.injectBeanValidationConstraintToDdlTranslator();
        validatorFactory.close();
    }
    private void injectBeanValidationConstraintToDdlTranslator() {
        try {
            Configuration hibernateConfiguration = factoryBuilder.getHibernateConfiguration();
            ValidatorFactory validatorFactory = (ValidatorFactory) factoryBuilder.getConfigurationValues()
                    .get(AvailableSettings.VALIDATION_FACTORY);
            // private class in Hibernate
            Method applyRelationalConstraints = Class.forName("org.hibernate.cfg.beanvalidation.TypeSafeActivator")
                    .getMethod("applyRelationalConstraints",
                            ValidatorFactory.class,
                            java.util.Collection.class,
                            Properties.class,
                            Dialect.class);
            applyRelationalConstraints.setAccessible(true);
            Dialect dialectInstance = (Dialect) Class.forName(dialect.getDialectClass()).newInstance();
            applyRelationalConstraints.invoke(null, validatorFactory,
                    Arrays.asList(Iterators.toArray(hibernateConfiguration.getClassMappings(), PersistentClass.class)),
                    hibernateConfiguration.getProperties(),
                    dialectInstance);
        }
        catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    @SuppressWarnings("unchecked")
    public void create() throws IOException {
        Configuration cfg = factoryBuilder.getHibernateConfiguration();
        cfg.setProperty("hibernate.hbm2ddl.auto", "create");
        SchemaExport export = new SchemaExport(cfg);
        export.setDelimiter(";");
        export.setOutputFile(Paths.get(outputPath.toString(), "ddl_create_" + dialect.name().toLowerCase() + ".sql").toString());
        export.execute(true, false, false, true);
        if (!export.getExceptions().isEmpty()) {
            System.out.println();
            System.out.println("SOME EXCEPTIONS OCCURRED WHILE GENERATING THE CREATE SCRIPT:");
            for (Exception e : (List<Exception>) export.getExceptions()) {
                System.out.println(e.getMessage());
            }
        }
    }

    @SuppressWarnings("unchecked")
    public void update() throws IOException {
        Configuration cfg = factoryBuilder.getHibernateConfiguration();
        cfg.setProperty("hibernate.hbm2ddl.auto", "update");
        SchemaUpdate updater = new SchemaUpdate(cfg);
        updater.setDelimiter(";");
        updater.setOutputFile(Paths.get(outputPath.toString(), "ddl_update_" + dialect.name().toLowerCase() + ".sql").toString());
        updater.execute(true, false);
        if (!updater.getExceptions().isEmpty()) {
            System.out.println();
            System.out.println("SOME EXCEPTIONS OCCURRED WHILE GENERATING THE UPDATE SCRIPT:");
            for (Exception e : ((List<Exception>) updater.getExceptions())) {
                System.out.println(e.getMessage());
            }
        }
    }
    public void validate() {
        Configuration hibernateConfiguration = factoryBuilder.getHibernateConfiguration();
        hibernateConfiguration.setProperty("hibernate.hbm2ddl.auto", "validate");
        SchemaValidator validator = new SchemaValidator(hibernateConfiguration);
        validator.validate();
    }

    public static void main(String[] args) throws Exception {
        Properties prop = new Properties(System.getProperties());
        prop.setProperty("hibernate.connection.driver_class", "value in your env");
        prop.setProperty("hibernate.connection.url", "value in your env");
        prop.setProperty("hibernate.connection.username", "value in your env");
        prop.setProperty("hibernate.connection.password", "value in your env");
        Path path = Paths.get("schema output path in your env");
        String packageName = prop.getProperty("package names of jpa classes");
        String unitName = prop.getProperty("jpa Unit Name");
        String[] dialects = "HSQL,MYSQL".split(",");
        for (String dialect : dialects) {
            DialectType dialectType = DialectType.valueOf(dialect);
            JpaSchemaExporter ddlExporter = new JpaSchemaExporter(unitName, packageName, prop, dialectType, path);
            ddlExporter.update();
            ddlExporter.create();
        }
    }
}
