I want to test the MessageProcessor1.listAllKeyword method, which in turn calls the HbaseUtil1.getAllKeyword method. Initially, I had to deal with a problem associated with the static initializer and the constructor: both initialize an HBase DB connection. I used PowerMock to suppress the static and constructor calls, and that worked fine.
Even though I mocked the HbaseUtil1.getAllKeyword method, the actual method is still called and executes all the HBase code, leading to an exception because the HBase server is not up.
EasyMock.expect(hbaseUtil.getAllKeyword("msg", "u1")).andReturn(expectedList);
Please give me any idea of how to avoid the actual method call. I have tried many ways, but none of them worked.
public class MessageProcessor1
{
private static Logger logger = Logger.getLogger("MQ-Processor");
private final static String CLASS_NAME = "MessageProcessor";
private static boolean keywordsTableExists = false;
public static PropertiesLoader props;
HbaseUtil1 hbaseUtil;
/**
* For checking if the table exists in HBase. If it doesn't exist, a new
* table will be created. This runs only once, when the class is loaded.
*/
static {
props = new PropertiesLoader();
String[] userTablefamilys = {
props.getProperty(Constants.COLUMN_FAMILY_NAME_COMMON_KEYWORDS),
props.getProperty(Constants.COLUMN_FAMILY_NAME_USER_KEYWORDS) };
keywordsTableExists = new HbaseUtil1()
.creatTable(props.getProperty(Constants.HBASE_TABLE_NAME),
userTablefamilys);
}
/**
* This will load a new configuration every time this class is instantiated.
*/
{
props = new PropertiesLoader();
}
public String listAllKeyword(String userId) throws IOException {
HbaseUtil1 util = new HbaseUtil1();
Map<String, List<String>> projKeyMap = new HashMap<String, List<String>>();
//logger.info(CLASS_NAME+": inside listAllKeyword method");
//logger.debug("passed id : "+userId);
List<String> qualifiers = util.getAllKeyword("msg", userId);
List<String> keywords = null;
for (String qualifier : qualifiers) {
String[] token = qualifier.split(":");
if (projKeyMap.containsKey(token[0])) {
projKeyMap.get(token[0]).add(token[1]);
} else {
keywords = new ArrayList<String>();
keywords.add(token[1]);
projKeyMap.put(token[0], keywords);
}
}
List<Project> projects = buildProject(projKeyMap);
Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation()
.create();
System.out.println("Json projects:::" + gson.toJson(projects));
//logger.debug("list all keyword based on project::::"+ gson.toJson(projects));
//return gson.toJson(projects);
return "raj";
}
private List<Project> buildProject(Map<String, List<String>> projKeyMap) {
List<Project> projects = new ArrayList<Project>();
Project proj = null;
Set<String> keySet = projKeyMap.keySet();
for (String hKey : keySet) {
proj = new Project(hKey, projKeyMap.get(hKey));
projects.add(proj);
}
return projects;
}
//@Autowired
//@Qualifier("hbaseUtil1")
public void setHbaseUtil(HbaseUtil1 hbaseUtil) {
this.hbaseUtil = hbaseUtil;
}
}
public class HbaseUtil1 {
private static Logger logger = Logger.getLogger("MQ-Processor");
private final static String CLASS_NAME = "HbaseUtil";
private static Configuration conf = null;
public HbaseUtil1() {
PropertiesLoader props = new PropertiesLoader();
conf = HBaseConfiguration.create();
conf.set(HConstants.ZOOKEEPER_QUORUM, props
.getProperty(Constants.HBASE_CONFIGURATION_ZOOKEEPER_QUORUM));
conf.set(
HConstants.ZOOKEEPER_CLIENT_PORT,
props.getProperty(Constants.HBASE_CONFIGURATION_ZOOKEEPER_CLIENT_PORT));
conf.set("hbase.zookeeper.quorum", props
.getProperty(Constants.HBASE_CONFIGURATION_ZOOKEEPER_QUORUM));
conf.set(
"hbase.zookeeper.property.clientPort",
props.getProperty(Constants.HBASE_CONFIGURATION_ZOOKEEPER_CLIENT_PORT));
}
public List<String> getAllKeyword(String tableName, String rowKey)
throws IOException {
List<String> qualifiers = new ArrayList<String>();
HTable table = new HTable(conf, tableName);
Get get = new Get(rowKey.getBytes());
Result rs = table.get(get);
for (KeyValue kv : rs.raw()) {
System.out.println("KV: " + kv + ", keyword: "
+ Bytes.toString(kv.getRow()) + ", qualifier: "
+ Bytes.toString(kv.getQualifier()) + ", family: "
+ Bytes.toString(kv.getFamily()) + ", value: "
+ Bytes.toString(kv.getValue()));
qualifiers.add(new String(kv.getQualifier()));
}
table.close();
return qualifiers;
}
/**
* Create a table
*
* @param tableName
* name of the table to be created.
* @param familys
* Array of the names of the column families to be created with the table
* @throws IOException
*/
public boolean creatTable(String tableName, String[] familys) {
HBaseAdmin admin = null;
boolean tableCreated = false;
try {
admin = new HBaseAdmin(conf);
if (!admin.tableExists(tableName)) {
HTableDescriptor tableDesc = new HTableDescriptor(tableName);
for (int i = 0; i < familys.length; i++) {
tableDesc.addFamily(new HColumnDescriptor(familys[i]));
}
admin.createTable(tableDesc);
System.out.println("create table " + tableName + " ok.");
}
tableCreated = true;
admin.close();
} catch (MasterNotRunningException e1) {
e1.printStackTrace();
} catch (ZooKeeperConnectionException e1) {
e1.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return tableCreated;
}
}
Below is my Test class.
@RunWith(PowerMockRunner.class)
@PrepareForTest(MessageProcessor1.class)
@SuppressStaticInitializationFor("com.serendio.msg.mqProcessor.MessageProcessor1")
public class MessageProcessorTest1 {
private MessageProcessor1 messageProcessor;
private HbaseUtil1 hbaseUtil;
@Before
public void setUp() {
messageProcessor = new MessageProcessor1();
hbaseUtil = EasyMock.createMock(HbaseUtil1.class);
}
@Test
public void testListAllKeyword(){
List<String> expectedList = new ArrayList<String>();
expectedList.add("raj:abc");
suppress(constructor(HbaseUtil1.class));
//suppress(method(HbaseUtil1.class, "getAllKeyword"));
try {
EasyMock.expect(hbaseUtil.getAllKeyword("msg", "u1")).andReturn(expectedList);
EasyMock.replay();
assertEquals("raj", messageProcessor.listAllKeyword("u1"));
} catch (IOException e) {
e.printStackTrace();
}catch (Exception e) {
e.printStackTrace();
}
}
}
HbaseUtil1 is instantiated within the listAllKeyword method:
public String listAllKeyword(String userId) throws IOException {
HbaseUtil1 util = new HbaseUtil1();
...
So the mock one you create in your test isn't being used at all.
If possible, make the HbaseUtil1 object passable to, or settable on, the MessageProcessor1 class, and then set it in the test class.
Also, and note I'm not 100% familiar with PowerMock, you could include HbaseUtil1 in the @PrepareForTest annotation. I think that will make PowerMock instantiate mocks instead of real objects and then use the expectations you provide in your test.
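For example, here is a minimal sketch of that second suggestion (untested, assuming PowerMock's EasyMock API, org.powermock.api.easymock.PowerMock): preparing MessageProcessor1, the class that executes new HbaseUtil1(), lets PowerMock intercept the constructor call and hand back the mock, so the real HBase code never runs:
@RunWith(PowerMockRunner.class)
@PrepareForTest(MessageProcessor1.class) // the class that calls new HbaseUtil1()
@SuppressStaticInitializationFor("com.serendio.msg.mqProcessor.MessageProcessor1")
public class MessageProcessorTest1 {
    @Test
    public void testListAllKeyword() throws Exception {
        List<String> expectedList = new ArrayList<String>();
        expectedList.add("raj:abc");
        HbaseUtil1 hbaseUtil = PowerMock.createMock(HbaseUtil1.class);
        // Intercept the "new HbaseUtil1()" inside listAllKeyword and return the mock
        PowerMock.expectNew(HbaseUtil1.class).andReturn(hbaseUtil);
        EasyMock.expect(hbaseUtil.getAllKeyword("msg", "u1")).andReturn(expectedList);
        PowerMock.replayAll();
        assertEquals("raj", new MessageProcessor1().listAllKeyword("u1"));
        PowerMock.verifyAll();
    }
}
Alternatively, with the first suggestion, you would call the existing setHbaseUtil(...) setter from the test and change listAllKeyword to use the hbaseUtil field instead of creating its own instance.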
Related
I created a Spring Boot project.
I use Spring Data with Elasticsearch.
The whole pipeline, controller -> service -> repository, is ready.
I now have a file that represents country objects (name and isoCode), and I want to create a job to insert them all into Elasticsearch.
I read the Spring documentation and found that there is too much configuration for such a simple job.
So I'm trying to write a simple main "job" that reads a CSV, creates objects, and inserts them into Elasticsearch.
But I have a bit of trouble understanding how injection would work in this case:
@Component
public class InsertCountriesJob {
private static final String file = "D:path\\to\\countries.dat";
private static final Logger LOG = LoggerFactory.getLogger(InsertCountriesJob.class);
@Autowired
public CountryService service;
public static void main(String[] args) {
LOG.info("Starting insert countries job");
try {
saveCountries();
} catch (Exception e) {
e.printStackTrace();
}
}
public static void saveCountries() throws Exception {
try (CSVReader csvReader = new CSVReader(new FileReader(file))) {
String[] values = null;
while ((values = csvReader.readNext()) != null) {
String name = values[0];
String iso = values[1].equals("N") ? values[2] : values[1];
Country country = new Country(iso, name);
LOG.info("info: country: {}", country);
//write in db;
//service.save(country); <= can't do this because of the injection
}
}
}
}
Based on Simon's comment, here's how I resolved my problem. It might help people who are getting into Spring and are trying not to get lost.
Basically, to inject anything in Spring, you need a Spring Boot application context:
@SpringBootApplication // enables component scanning so CountryService can be injected
public class InsertCountriesJob implements CommandLineRunner {
private static final String file = "D:path\\to\\countries.dat";
private static final Logger LOG = LoggerFactory.getLogger(InsertCountriesJob.class);
@Autowired
public CountryService service;
public static void main(String[] args) {
LOG.info("STARTING THE APPLICATION");
SpringApplication.run(InsertCountriesJob.class, args);
LOG.info("APPLICATION FINISHED");
}
@Override
public void run(String... args) throws Exception {
LOG.info("Starting insert countries job");
try {
saveCountry();
} catch (Exception e) {
e.printStackTrace();
}
LOG.info("job over");
}
public void saveCountry() throws Exception {
try (CSVReader csvReader = new CSVReader(new FileReader(file))) {
String[] values = null;
while ((values = csvReader.readNext()) != null) {
String name = values[0];
String iso = values[1].equals("N") ? values[2] : values[1];
Country country = new Country(iso, name);
LOG.info("info: country: {}", country);
//write in db;
service.save(country);
}
}
}
}
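The reason this works where the original version failed: a static main runs before any Spring context exists, so the @Autowired field is never populated. With CommandLineRunner, SpringApplication.run builds the context first and only then calls run(...), by which time service has been injected.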
I am using mockito-junit to test a piece of my code. As I progressed, I found that the main file I was testing implemented an interface; when the test ran, the line where the interface method is called got covered, but the real method itself did not get covered.
This is the code for the main file:
public class ExtractCurrencyDataTask {
private static final Logger LOGGER = LoggerFactory.getLogger(ExtractCurrencyDataTask.class);
@Autowired
private ExtractCurrencyService extractCurrencyService;
public void writeCurrencyListToFile(List<Currency> currencyList) {
if (currencyList != null && !currencyList.isEmpty()) {
String dir = "A path";
String fileName = "A filename";
String writeToFile = dir + "/" + fileName + ".writing";
String renameFile = dir + "/" + fileName + ".dat";
BufferedWriter writer = null;
FileWriter fileWriter = null;
try {
fileWriter = new FileWriter(writeToFile);
writer = new BufferedWriter(fileWriter);
extractCurrencyService.extractCurrencyList(currencyList, writer);
} catch (Exception e) {
throw new RuntimeException("Error writing Currency codes", e);
} finally {
if (writer != null) {
try {
writer.close();
fileWriter.close();
} catch (IOException e) {
LOGGER.info("Exception occured while closing the file writer", e);
}
moveFile(writeToFile, renameFile);
}
}
}
}
private void moveFile(String writeToFile, String renameFile) {
try {
FileUtils.moveFile(FileUtils.getFile(writeToFile), FileUtils.getFile(renameFile));
} catch (IOException e) {
LOGGER.info("Exception occured while moving file from writing to dat", e);
}
}
Here, extractCurrencyService is the interface I mentioned.
The interface:
public interface ExtractCurrencyService {
public void extractCurrencyList(List<Currency> currency, Writer writer);
}
This is the method definition, which is in another file that implements the same interface (filename: ExtractCurrencyServiceImpl.java):
public class ExtractCurrencyServiceImpl implements ExtractCurrencyService {
private static final String SEP = "|";
private static final String NEWLINE = "\n";
@Override
public void extractCurrencyList(List<Currency> currencyList, Writer writer) {
if (currencyList != null) {
currencyList.forEach(currency -> {
String code = currency.getCode();
String name = currency.getName() == null ? "" : currency.getName();
Long noOfDecimals = currency.getNumberOfDecimals();
RoundingMethodValue roundingMethod = currency.getRoundingMethod();
boolean isDealCurrency = currency.isDealCurrency();
String description = currency.getDescription() == null ? "" : currency.getDescription();
try {
writer.write(createCurrencyDataLine(code,
name,
noOfDecimals,
roundingMethod,
isDealCurrency,
description));
} catch (Exception e) {
throw new RuntimeException(e);
}
});
}
}
private String createCurrencyDataLine(String code,
String name,
Long noOfDecimals,
RoundingMethodValue roundingMethod,
boolean isdealCurrency,
String description) {
return code + SEP + name + SEP + noOfDecimals.toString() + SEP + roundingMethod.toString() + SEP
+ isdealCurrency + SEP + description + NEWLINE;
}
public static <T> Predicate<T> distinctByKey(Function<? super T, Object> keyExtractor) {
Map<Object, Boolean> map = new ConcurrentHashMap<>();
return t -> map.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null;
}
}
This is the test file:
@RunWith(MockitoJUnitRunner.class)
public class ExtractCurrencyDataTaskTest {
@Mock
private Currency mockCurrency;
@Mock
private ExtractCurrencyService mockExtractCurrencyService;
@Mock
private BufferedWriter mockBufferWriter;
@Mock
private Bean mockBean;
@InjectMocks
private ExtractCurrencyDataTask extractCurrencyDataTask;
@Test
public void writeCurrencyListToFileTest() {
List<Currency> currencyList = new ArrayList();
when(mockCurrency.getCode()).thenReturn("USD");
when(mockCurrency.getNumberOfDecimals()).thenReturn((long) 2);
when(mockCurrency.getRoundingMethod()).thenReturn(enum value);
when(mockCurrency.isDealCurrency()).thenReturn(true);
when(mockCurrency.getName()).thenReturn("US Dollars");
when(mockCurrency.getDescription()).thenReturn("Currency Description");
currencyList.add(mockCurrency);
extractCurrencyDataTask.writeCurrencyListToFile(currencyList);
}
}
This is the configuration for the autowired bean:
@Bean
public ExtractCurrencyService extractCurrencyService() {
return new ExtractCurrencyServiceImpl();
}
As you can see, the real output of this process is a file created in the mentioned path with some data. In this test I am mocking the data and passing it to the main file. The file is created in the respective path, but there is no data in it.
The data-writing part is done by the interface method. This is the part where I need help.
Thanks in advance.
You are injecting a mock of ExtractCurrencyService into your tested class, so the test is running with this mock instance instead of ExtractCurrencyServiceImpl. The current behaviour is that your tested class, ExtractCurrencyDataTask, calls extractCurrencyService#extractCurrencyList, but this extractCurrencyService is a mock, not your real implementation, so the call is made but it does nothing.
If you want to unit test ExtractCurrencyDataTask, then that's OK, but you should probably assert that the call to extractCurrencyService#extractCurrencyList is made in the way you expect.
If you want to unit test ExtractCurrencyServiceImpl, then create a unit test for that class.
If you want to test the interaction between these two classes, then create an integration test where ExtractCurrencyDataTask has a real instance of ExtractCurrencyServiceImpl injected, not a mock.
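For the first option, a minimal sketch (assuming Mockito 2's ArgumentMatchers; with the service mocked, the Currency stubbings from the original test are not needed, because the real implementation never reads the currency fields):
import java.io.Writer;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.verify;

@Test
public void writeCurrencyListToFileTest() {
    List<Currency> currencyList = new ArrayList<>();
    currencyList.add(mockCurrency);
    extractCurrencyDataTask.writeCurrencyListToFile(currencyList);
    // Assert the task delegated to the service with the same list and some Writer
    // (the BufferedWriter it creates internally).
    verify(mockExtractCurrencyService).extractCurrencyList(eq(currencyList), any(Writer.class));
}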
I am annotating a huge number of Strings as CoreDocuments through Stanford CoreNLP. StanfordCoreNLP pipelines have an internal feature for multithreaded annotating to optimize the process; however, as far as I can see, CoreDocument objects can't use that feature in the version I run, which is stanford-corenlp-full-2018-10-05.
Since I could not make pipelines annotate collections of CoreDocuments, I instead tried to optimize the individual annotations by placing them inside multithreaded methods. I have no issues with the multithreaded environment; I receive all results back as expected. My only drawback is the time consumption. I tried about 7 different implementations, and these were the 3 fastest:
//ForkJoinPool is initialized in the main method in my application
private static ForkJoinPool executor = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, false);
public static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWay1(Collection<String> str) {
ConcurrentMap<String, CoreDocument> pipelineCoreDocumentAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
str.parallelStream().forEach((str1) -> {
CoreDocument coreDocument = new CoreDocument(str1);
pipeline.annotate(coreDocument);
pipelineCoreDocumentAnnotations.put(str1, coreDocument);
System.out.println("pipelineCoreDocumentAnnotations size1: " + pipelineCoreDocumentAnnotations.size() + "\nstr size: " + str.size() + "\n");
});
return pipelineCoreDocumentAnnotations;
}
public static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWay4(Collection<String> str) {
ConcurrentMap<String, CoreDocument> pipelineCoreDocumentAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
str.parallelStream().forEach((str1) -> {
try {
ForkJoinTask<CoreDocument> forkCD = new RecursiveTask() {
@Override
protected CoreDocument compute() {
CoreDocument coreDocument = new CoreDocument(str1);
pipeline.annotate(coreDocument);
return coreDocument;
}
};
forkCD.invoke();
pipelineCoreDocumentAnnotations.put(str1, forkCD.get());
System.out.println("pipelineCoreDocumentAnnotations2 size: " + pipelineCoreDocumentAnnotations.size() + "\nstr size: " + str.size() + "\n");
} catch (InterruptedException | ExecutionException ex) {
Logger.getLogger(Parsertest.class.getName()).log(Level.SEVERE, null, ex);
}
});
return pipelineCoreDocumentAnnotations;
}
public static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWay7(ConcurrentMap<Integer, String> hlstatsSTR) {
RecursiveDocumentAnnotation recursiveAnnotation = new RecursiveDocumentAnnotation(hlstatsSTR, pipeline);
ConcurrentMap<String, CoreDocument> returnMap = new MapMaker().concurrencyLevel(2).makeMap();
executor.execute(recursiveAnnotation);
try {
returnMap = recursiveAnnotation.get();
} catch (InterruptedException | ExecutionException ex) {
Logger.getLogger(Parsertest.class.getName()).log(Level.SEVERE, null, ex);
}
System.out.println("reached end\n");
return returnMap;
}
RecursiveDocumentAnnotation class:
public class RecursiveDocumentAnnotation extends RecursiveTask<ConcurrentMap<String, CoreDocument>> {
private String str;
private StanfordCoreNLP nlp;
private static ConcurrentMap<String, CoreDocument> pipelineCoreDocumentAnnotations;
private static ConcurrentMap<Integer, String> hlstatsStrMap;
public static ConcurrentMap<String, CoreDocument> getPipelineCoreDocumentAnnotations() {
return pipelineCoreDocumentAnnotations;
}
public RecursiveDocumentAnnotation(ConcurrentMap<Integer, String> hlstatsStrMap, StanfordCoreNLP pipeline) {
this.pipelineCoreDocumentAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
this.str = hlstatsStrMap.get(0);
this.nlp = pipeline;
this.hlstatsStrMap = hlstatsStrMap;
}
public RecursiveDocumentAnnotation(ConcurrentMap<Integer, String> hlstatsStrMap, StanfordCoreNLP pipeline,
ConcurrentMap<String, CoreDocument> returnMap) {
this.str = hlstatsStrMap.get(returnMap.size());
this.nlp = pipeline;
this.hlstatsStrMap = hlstatsStrMap;
this.pipelineCoreDocumentAnnotations = returnMap;
}
@Override
protected ConcurrentMap<String, CoreDocument> compute() {
CoreDocument coreDocument = new CoreDocument(str);
nlp.annotate(coreDocument);
pipelineCoreDocumentAnnotations.put(str, coreDocument);
System.out.println("hlstatsStrMap size: " + hlstatsStrMap.size() + "\npipelineCoreDocumentAnnotations size: " + pipelineCoreDocumentAnnotations.size()
+ "\n");
if (pipelineCoreDocumentAnnotations.size() >= hlstatsStrMap.size()) {
return pipelineCoreDocumentAnnotations;
}
RecursiveDocumentAnnotation recursiveAnnotation = new RecursiveDocumentAnnotation(hlstatsStrMap, nlp, pipelineCoreDocumentAnnotations);
recursiveAnnotation.fork();
return recursiveAnnotation.join();
} }
Time parallel1: 336562 ms.
Time parallel4: 391556 ms.
Time parallel7: 491639 ms.
Honestly, the greatest outcome would be if the pipeline could somehow do the multithreaded annotation by itself; however, as long as I don't know how to achieve this, I hope somebody can explain how to optimize the CoreDocument annotations individually.
PS: Mashing all the strings together into a single CoreDocument for annotation is also not what I want, since I need the CoreDocuments individually for comparisons afterwards.
I didn't time this, but you could try this sample code (add test Strings to the list of Strings)...it should work on 4 documents at the same time:
package edu.stanford.nlp.examples;
import edu.stanford.nlp.pipeline.*;
import java.util.*;
import java.util.function.*;
import java.util.stream.*;
public class MultiThreadStringExample {
public static class AnnotationCollector<T> implements Consumer<T> {
List<T> annotations = new ArrayList<T>();
public void accept(T ann) {
annotations.add(ann);
}
}
public static void main(String[] args) throws Exception {
Properties props = new Properties();
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,depparse");
props.setProperty("threads", "4");
StanfordCoreNLP pipeline = new StanfordCoreNLP(props);
AnnotationCollector<Annotation> annCollector = new AnnotationCollector<Annotation>();
List<String> exampleStrings = new ArrayList<String>();
for (String exampleString : exampleStrings) {
pipeline.annotate(new Annotation(exampleString), annCollector);
}
Thread.sleep(10000);
List<CoreDocument> coreDocs =
annCollector.annotations.stream().map(ann -> new CoreDocument(ann)).collect(Collectors.toList());
for (CoreDocument coreDoc : coreDocs) {
System.out.println(coreDoc.tokens());
}
}
}
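A note on the design, plus a sketch: the annotate(Annotation, Consumer) overload used above is the asynchronous variant, and the "threads" property sizes the pipeline's internal worker pool, so no external executor is needed. The Thread.sleep(10000) is just a crude way to wait for the queue to drain; a variant of the collector (my assumption, not part of the original answer) could count completions with a CountDownLatch instead:
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.function.Consumer;
import edu.stanford.nlp.pipeline.Annotation;

public class CountingCollector implements Consumer<Annotation> {
    final List<Annotation> annotations = Collections.synchronizedList(new ArrayList<>());
    final CountDownLatch latch;
    public CountingCollector(int expectedDocs) {
        this.latch = new CountDownLatch(expectedDocs);
    }
    public void accept(Annotation ann) {
        annotations.add(ann);
        latch.countDown(); // one document finished
    }
}
// Usage instead of the fixed sleep:
// CountingCollector collector = new CountingCollector(exampleStrings.size());
// for (String s : exampleStrings) pipeline.annotate(new Annotation(s), collector);
// collector.latch.await(); // returns once every document has been annotated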
I have a singleton class which connects to Cassandra. I want to initialize processMetadata, procMetadata, and topicMetadata all at once, not one by one. If they get initialized all at once, then I will see consistent values from all three, not different values for any of them.
In the code below, processMetadata, procMetadata, and topicMetadata are initialized for the first time inside the initializeMetadata method, and then they get updated every 15 minutes.
public class CassUtil {
private static final Logger LOGGER = Logger.getInstance(CassUtil.class);
private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
// below are my three metadata lists which I need to update all at once, not one by one
private List<ProcessMetadata> processMetadata = new ArrayList<>();
private List<ProcMetadata> procMetadata = new ArrayList<>();
private List<String> topicMetadata = new ArrayList<>();
private Session session;
private Cluster cluster;
private static class Holder {
private static final CassUtil INSTANCE = new CassUtil();
}
public static CassUtil getInstance() {
return Holder.INSTANCE;
}
private CassUtil() {
List<String> servers = TestUtils.HOSTNAMES;
String username = TestUtils.USERNAME;
String password = TestUtils.PASSWORD;
PoolingOptions opts = new PoolingOptions();
opts.setCoreConnectionsPerHost(HostDistance.LOCAL,
opts.getCoreConnectionsPerHost(HostDistance.LOCAL));
Builder builder = Cluster.builder();
cluster =
builder
.addContactPoints(servers.toArray(new String[servers.size()]))
.withRetryPolicy(DowngradingConsistencyRetryPolicy.INSTANCE)
.withPoolingOptions(opts)
.withReconnectionPolicy(new ConstantReconnectionPolicy(100L))
.withLoadBalancingPolicy(
DCAwareRoundRobinPolicy
.builder()
.withLocalDc(
!TestUtils.isProduction() ? "DC2" : TestUtils.getCurrentLocation()
.get().name().toLowerCase()).build())
.withCredentials(username, password).build();
try {
session = cluster.connect("testkeyspace");
} catch (NoHostAvailableException ex) {
LOGGER.logError("error= ", ExceptionUtils.getStackTrace(ex));
} catch (Exception ex) {
LOGGER.logError("error= " + ExceptionUtils.getStackTrace(ex));
}
}
// start a background thread which runs every 15 minutes
public void startScheduleTask() {
scheduler.scheduleAtFixedRate(new Runnable() {
public void run() {
try {
processMetadata = processMetadata(true);
topicMetadata = listOfTopic(TestUtils.GROUP_ID);
procMetadata = procMetadata();
} catch (Exception ex) {
LOGGER.logError("error= ", ExceptionUtils.getStackTrace(ex));
}
}
}, 0, 15, TimeUnit.MINUTES);
}
// called from main thread to initialize the metadata
// and start the background thread where it gets updated
// every 15 minutes
public void initializeMetadata() {
processMetadata = processMetadata(true);
topicMetadata = listOfTopic(TestUtils.GROUP_ID);
procMetadata = procMetadata();
startScheduleTask();
}
private List<String> listOfTopic(final String consumerName) {
List<String> listOfTopics = new ArrayList<>();
String sql = "select topics from topic_metadata where id=1 and consumerName=?";
try {
// get data from cassandra
} catch (Exception ex) {
LOGGER.logError("error= ", ExceptionUtils.getStackTrace(ex), ", Consumer Name= ",
consumerName);
}
return listOfTopics;
}
private List<ProcessMetadata> processMetadata(final boolean flag) {
List<ProcessMetadata> metadatas = new ArrayList<>();
String sql = "select * from process_metadata where id=1 and is_active=?";
try {
// get data from cassandra
} catch (Exception ex) {
LOGGER.logError("error= ", ExceptionUtils.getStackTrace(ex), ", active= ", flag);
}
return metadatas;
}
private List<ProcMetadata> procMetadata() {
List<ProcMetadata> metadatas = new ArrayList<>();
String sql = "select * from schema where id=1";
try {
// get data from cassandra
} catch (SchemaParseException ex) {
LOGGER.logError("schema parsing error= ", ExceptionUtils.getStackTrace(ex));
} catch (Exception ex) {
LOGGER.logError("error= ", ExceptionUtils.getStackTrace(ex));
}
return metadatas;
}
public List<ProcessMetadata> getProcessMetadata() {
return processMetadata;
}
public List<String> getTopicMetadata() {
return topicMetadata;
}
public List<ProcMetadata> getProcMetadata() {
return procMetadata;
}
}
So from my main thread, I call the initializeMetadata method only once; it initializes those three metadata lists and then starts a background thread which updates them every 15 minutes. After that, I was using them like below from my multiple threads:
CassUtil.getInstance().getProcessMetadata();
CassUtil.getInstance().getTopicMetadata();
CassUtil.getInstance().getProcMetadata();
Problem statement:
Now I want to see the same state of processMetadata, topicMetadata, and procMetadata. Meaning, these three metadata lists should be updated at the same time, not one after the other, because I don't want to see mixed-state values for them after I call get on them.
How can I avoid this issue? Do I need to create another class which will hold these three metadata lists as constructor parameters?
The most efficient way to keep a consistent state of your lists is to use an immutable class that holds your 3 lists; you then have a field of this type in your class, declared volatile, to make sure that all threads see the latest update of this field.
Here is, for example, the immutable class we use to hold the state of the lists (it could be an ordinary class, but as it is implementation-specific, it could be a static inner class):
private static class State {
private final List<ProcessMetadata> processMetadata;
private final List<ProcMetadata> procMetadata;
private final List<String> topicMetadata;
public State(final List<ProcessMetadata> processMetadata,
final List<ProcMetadata> procMetadata, final List<String> topicMetadata) {
this.processMetadata = new ArrayList<>(processMetadata);
this.procMetadata = new ArrayList<>(procMetadata);
this.topicMetadata = new ArrayList<>(topicMetadata);
}
// Getters
}
Then your class would be something like that:
public class CassUtil {
...
private volatile State state = new State(
new ArrayList<>(), new ArrayList<>(), new ArrayList<>()
);
...
public void startScheduleTask() {
...
this.state = new State(
processMetadata(true), listOfTopic(TestUtils.GROUP_ID),
procMetadata()
);
...
}
...
public void initializeMetadata() {
this.state = new State(
processMetadata(true), listOfTopic(TestUtils.GROUP_ID), procMetadata()
);
startScheduleTask();
}
...
public List<ProcessMetadata> getProcessMetadata() {
return this.state.getProcessMetadata();
}
public List<String> getTopicMetadata() {
return this.state.getTopicMetadata();
}
public List<ProcMetadata> getProcMetadata() {
return this.state.getProcMetadata();
}
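One caveat worth spelling out (my addition, assuming State and a getState() accessor are made visible to callers): a caller that needs all three lists from the same refresh should read the volatile field once and keep that snapshot, rather than calling the three delegating getters above separately, because a scheduled update could land between two getter calls:
// Hypothetical accessor on CassUtil exposing one consistent snapshot.
public State getState() {
    return this.state; // a single volatile read
}

// Caller side: all three lists are guaranteed to come from the same refresh.
State snapshot = CassUtil.getInstance().getState();
List<ProcessMetadata> processMetadata = snapshot.getProcessMetadata();
List<String> topicMetadata = snapshot.getTopicMetadata();
List<ProcMetadata> procMetadata = snapshot.getProcMetadata();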
I have to write some DAO tests for a project where I want to:
create a DDL schema from a database (MySQL);
create tables in another test database in memory (H2);
insert some data into the database;
select the just-inserted item;
check some data from this item.
This is my test:
public class BridgeDBTest {
private static String JDBC_DRIVER;
private static String JDBC_URL;
private static String USER;
private static String PSWD;
private static final Logger logger = LoggerFactory.getLogger(BridgeDBTest.class);
@BeforeGroups(groups = "bridgeDB")
public void init(){
try {
JDBC_DRIVER = org.h2.Driver.class.getName();
JDBC_URL = "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1";
USER = "root";
PSWD = "";
new HibernateTestUtil().setDialect("org.hibernate.dialect.HSQLDialect")
.translateCreateDllToOutputStream(new FileOutputStream(new File("src/test/resources/createSchema.sql")));
RunScript.execute(JDBC_URL, USER, PSWD, "src/test/resources/createSchema.sql", Charset.forName("UTF8"), false);
insertDataset(readDataSet());
}
catch (Exception expt) {
expt.printStackTrace();
logger.error("!!!" + expt);
throw new RuntimeException(expt.getMessage());
}
}
@Test(groups = "bridgeDB")
public void getItem(){
BridgeDAOImpl dao = new BridgeDAOImpl();
dao.setSessionFactory(new HibernateTestUtil().getSessionFactory());
try {
Bridge bridge = dao.get(1L);
assert(bridge.getName().equals("TEST-CN-DEVBOX01"));
} catch (ServiceException e) {
e.printStackTrace();
}
}
@AfterGroups(groups = "bridgeDB")
public void dropTables(){
try {
new HibernateTestUtil().setDialect("org.hibernate.dialect.HSQLDialect")
.translateDropDllToOutputStream(new FileOutputStream(new File("src/test/resources/dropSchema.sql")));
}
catch (Exception expt) {
expt.printStackTrace();
logger.error("!!!" + expt);
throw new RuntimeException(expt.getMessage());
}
}
private IDataSet readDataSet() throws Exception{
return new FlatXmlDataSetBuilder().build(new File("src/test/resources/datasetForTest.xml"));
}
private void insertDataset(IDataSet dataSet) throws Exception{
IDatabaseTester databaseTester = new JdbcDatabaseTester(JDBC_DRIVER, JDBC_URL, USER, PSWD);
databaseTester.setSetUpOperation(DatabaseOperation.CLEAN_INSERT);
databaseTester.setDataSet(dataSet);
databaseTester.onSetup();
}
}
BridgeDAOImpl uses the class HibernateUtil from src/main/..., but I need to use my class HibernateTestUtil from src/test/.... It's a modified HibernateUtil fitted for my test (there I set the parameters for the Configuration class).
BridgeDAOImpl (see line 5 in the try block):
public class BridgeDAOImpl extends GenericDAOImpl<Bridge, Long> implements BridgeDAO {
//...
public SearchResult<Bridge> list(int from, int limit, String filter, String order, Long authId) throws ServiceException {
SearchResult<Bridge> results = null;
Search search = new Search(Bridge.class);
Session session = getSessionFactory().getCurrentSession();
Transaction transaction = null;
try {
transaction = session.beginTransaction();
search.setFirstResult(from);
search.setMaxResults(limit);
HibernateUtil.buildSearch(filter, order, search, aliases);
results = searchAndCount(search);
transaction.commit();
}
catch (Exception expt) {
logger.error("!!!", expt);
if (transaction != null) {
transaction.rollback();
}
throw new ServiceException(expt.getMessage());
}
finally {
// session.close();
}
return results;
}
//...
}
How can I test my DAO without modifying it?