I found this code on Stack Overflow that explains how to get all running processes on Windows; it retrieves the name and PID:
Kernel32 kernel32 = (Kernel32) Native.loadLibrary(Kernel32.class, W32APIOptions.UNICODE_OPTIONS);
Tlhelp32.PROCESSENTRY32.ByReference processEntry = new Tlhelp32.PROCESSENTRY32.ByReference();
WinNT.HANDLE snapshot = kernel32.CreateToolhelp32Snapshot(Tlhelp32.TH32CS_SNAPPROCESS, new WinDef.DWORD(0));
try {
while (kernel32.Process32Next(snapshot, processEntry)) {
System.out.println(processEntry.th32ProcessID + "\t" + Native.toString(processEntry.szExeFile));
}
} finally {
kernel32.CloseHandle(snapshot);
}
My question is: how can I get the process path?
Using JNA, you need to define the MODULEENTRY32 structure and map some required functions:
import java.util.Arrays;
import java.util.List;
import com.sun.jna.Native;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import com.sun.jna.platform.win32.Kernel32;
import com.sun.jna.platform.win32.WinDef;
import com.sun.jna.win32.W32APIOptions;
public interface ProcessPathKernel32 extends Kernel32 {
class MODULEENTRY32 extends Structure {
public static class ByReference extends MODULEENTRY32 implements Structure.ByReference {
public ByReference() {
}
public ByReference(Pointer memory) {
super(memory);
}
}
public MODULEENTRY32() {
dwSize = new WinDef.DWORD(size());
}
public MODULEENTRY32(Pointer memory) {
super(memory);
read();
}
public DWORD dwSize;
public DWORD th32ModuleID;
public DWORD th32ProcessID;
public DWORD GlblcntUsage;
public DWORD ProccntUsage;
public Pointer modBaseAddr;
public DWORD modBaseSize;
public HMODULE hModule;
public char[] szModule = new char[255+1]; // MAX_MODULE_NAME32
public char[] szExePath = new char[MAX_PATH];
public String szModule() { return Native.toString(this.szModule); }
public String szExePath() { return Native.toString(this.szExePath); }
@Override
protected List<String> getFieldOrder() {
return Arrays.asList(new String[] {
"dwSize", "th32ModuleID", "th32ProcessID", "GlblcntUsage", "ProccntUsage", "modBaseAddr", "modBaseSize", "hModule", "szModule", "szExePath"
});
}
}
ProcessPathKernel32 INSTANCE = (ProcessPathKernel32)Native.loadLibrary(ProcessPathKernel32.class, W32APIOptions.UNICODE_OPTIONS);
boolean Module32First(HANDLE hSnapshot, MODULEENTRY32.ByReference lpme);
boolean Module32Next(HANDLE hSnapshot, MODULEENTRY32.ByReference lpme);
}
Then you retrieve the processes and, for each PID, retrieve the module information (the path of the module is now available). If running from a 32-bit process you only get the module information from 32-bit processes (the path will be blank for 64-bit processes).
import com.sun.jna.Native;
import com.sun.jna.platform.win32.Kernel32;
import com.sun.jna.platform.win32.Kernel32Util;
import com.sun.jna.platform.win32.Tlhelp32;
import com.sun.jna.platform.win32.WinDef;
import com.sun.jna.platform.win32.WinNT;
import com.sun.jna.win32.W32APIOptions;
public class ProcessPathAll {
public static void main(String ... args) {
Kernel32 kernel32 = (Kernel32) Native.loadLibrary(Kernel32.class, W32APIOptions.DEFAULT_OPTIONS);
Tlhelp32.PROCESSENTRY32.ByReference processEntry = new Tlhelp32.PROCESSENTRY32.ByReference();
WinNT.HANDLE processSnapshot =
kernel32.CreateToolhelp32Snapshot(Tlhelp32.TH32CS_SNAPPROCESS, new WinDef.DWORD(0));
try {
while (kernel32.Process32Next(processSnapshot, processEntry)) {
// looks for a specific process
// if (Native.toString(processEntry.szExeFile).equalsIgnoreCase("textpad.exe")) {
System.out.print(processEntry.th32ProcessID + "\t" + Native.toString(processEntry.szExeFile) + "\t");
WinNT.HANDLE moduleSnapshot =
kernel32.CreateToolhelp32Snapshot(Tlhelp32.TH32CS_SNAPMODULE, processEntry.th32ProcessID);
try {
ProcessPathKernel32.MODULEENTRY32.ByReference me = new ProcessPathKernel32.MODULEENTRY32.ByReference();
ProcessPathKernel32.INSTANCE.Module32First(moduleSnapshot, me);
System.out.print(": " + me.szExePath() );
System.out.println();
}
finally {
kernel32.CloseHandle(moduleSnapshot);
}
// }
}
}
finally {
kernel32.CloseHandle(processSnapshot);
}
}
}
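A note beyond the original answer: as mentioned above, the module snapshot cannot cross the 32-bit/64-bit boundary. If that matters, one possible complement is to map QueryFullProcessImageName with the same extension-interface technique and query each PID directly. This is only a sketch; the interface and helper names are hypothetical, and the access-rights constant is declared locally in case the JNA version in use does not define it.
import com.sun.jna.Native;
import com.sun.jna.platform.win32.Kernel32;
import com.sun.jna.platform.win32.WinNT;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.win32.W32APIOptions;
// Hypothetical extension interface: maps QueryFullProcessImageName(W) from kernel32.dll
// (the UNICODE options append the "W" suffix automatically).
interface ProcessImageKernel32 extends Kernel32 {
    ProcessImageKernel32 INSTANCE =
            (ProcessImageKernel32) Native.loadLibrary(ProcessImageKernel32.class, W32APIOptions.UNICODE_OPTIONS);
    boolean QueryFullProcessImageName(WinNT.HANDLE hProcess, int dwFlags, char[] lpExeName, IntByReference lpdwSize);
}
public class ProcessPathByPid {
    // PROCESS_QUERY_LIMITED_INFORMATION (0x1000), declared locally in case this JNA version lacks the constant
    private static final int PROCESS_QUERY_LIMITED_INFORMATION = 0x1000;
    public static String pathOf(int pid) {
        WinNT.HANDLE process = ProcessImageKernel32.INSTANCE.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, false, pid);
        if (process == null) {
            return null; // access denied, protected process, or the process already exited
        }
        try {
            char[] buffer = new char[1024];
            IntByReference size = new IntByReference(buffer.length);
            return ProcessImageKernel32.INSTANCE.QueryFullProcessImageName(process, 0, buffer, size)
                    ? Native.toString(buffer) : null;
        } finally {
            ProcessImageKernel32.INSTANCE.CloseHandle(process);
        }
    }
}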
Related
I'm trying to add a texture to an item I have, and it's not loading in Minecraft. Here's my base mod class:
package fr.doufut.test;
import fr.doufut.test.events.RegisteringEvent;
import fr.doufut.test.proxy.CommonProxy;
import fr.doufut.test.utils.Reference;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.SidedProxy;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPostInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
@Mod(modid = Reference.MODID, name = Reference.NAME, version = Reference.VERSION)
public class TestMain {
@Mod.Instance(Reference.MODID)
public static TestMain instance;
@SidedProxy(clientSide = Reference.CP, serverSide = Reference.SP)
public static CommonProxy proxy;
public TestMain()
{
MinecraftForge.EVENT_BUS.register(new RegisteringEvent());
}
@Mod.EventHandler
public void preinit(FMLPreInitializationEvent e)
{
proxy.preinit();
}
@Mod.EventHandler
public void init(FMLInitializationEvent e)
{
proxy.init();
}
@Mod.EventHandler
public void postinit(FMLPostInitializationEvent e)
{
proxy.postinit();
}
}
My common proxy class:
package fr.doufut.test.proxy;
public class CommonProxy {
public void preinit()
{
}
public void init()
{
}
public void postinit()
{
}
}
My Client Proxy class:
package fr.doufut.test.proxy;
import fr.doufut.test.init.ModItems;
import net.minecraftforge.common.MinecraftForge;
public class ClientProxy extends CommonProxy
{
@Override
public void preinit() {
super.preinit();
MinecraftForge.EVENT_BUS.register(ModItems.INSTANCE);
}
@Override
public void init() {
super.init();
}
@Override
public void postinit() {
super.postinit();
}
}
Here is the actual ModItem class:
package fr.doufut.test.init;
import com.google.common.collect.Lists;
import fr.doufut.test.items.HDOItems;
import fr.doufut.test.utils.Reference;
import net.minecraft.client.renderer.block.model.ModelResourceLocation;
import net.minecraft.item.Item;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.client.event.ModelRegistryEvent;
import net.minecraftforge.client.model.ModelLoader;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import java.util.List;
public class ModItems {
public static final ModItems INSTANCE = new ModItems();
public static Item coca;
private List<Item> items;
public void init()
{
items = Lists.newArrayList();
coca = new HDOItems("coca");
}
@SubscribeEvent
public void registerModels(ModelRegistryEvent e)
{
for (Item item : items)
{
registerModel(item);
}
}
private void registerModel(Item item)
{
ModelLoader.setCustomModelResourceLocation(item, 0, new ModelResourceLocation(new ResourceLocation(Reference.MODID, item.getUnlocalizedName().substring(5)), "inventory"));
}
public List<Item> getItems()
{
return items;
}
}
Here is the HDOItem class file:
package fr.doufut.test.items;
import fr.doufut.test.init.ModItems;
import net.minecraft.item.Item;
public class HDOItems extends Item
{
public HDOItems(String name)
{
setRegistryName(name).setUnlocalizedName(name);
ModItems.INSTANCE.getItems().add(this);
}
}
Here is the item's json model file:
{
"parent": "item/generated",
"textures": {
"layer0": "hdo:/items/coca"
}
}
My folder structure:
Json file: C:\Users\rober\Desktop\forge-1.12.2-14.23.5.2859-mdk\src\main\resources\assets\hdo\models\item\coca.json
PNG file: C:\Users\rober\Desktop\forge-1.12.2-14.23.5.2859-mdk\src\main\resources\assets\hdo\textures\items.json
Just checking: is your PNG file really "items.json" and not "coca.png"?
As far as I know, the "textures" folder should contain two folders:
"blocks" and "items".
The file "coca.png" goes inside "textures/items".
I'm quite new to Azure Blob Storage, but I managed to get it working in Java. We have some XML files saved there and collect the file list as strings. Now I've tried to create a unit test to verify it keeps working, and since the getFiles() function is very small I expected it to be very simple to test.
@Override
public List<String> getFiles(ExecutionContext context) {
return StreamSupport.stream(blobContainerClient.listBlobs().spliterator(), true)
.map(BlobItem::getName)
.collect(Collectors.toList());
}
I can mock com.azure.storage.blob.BlobContainerClient and its listBlobs function, but when trying to create the PagedIterable from a simple List I either cannot make the data types fit or it runs into an endless loop.
Since the functionality is so minimal we would normally just skip testing it, but out of curiosity I want to know whether it can be tested, and what is wrong with my code:
import com.azure.core.http.rest.*;
import com.azure.core.util.IterableStream;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.models.BlobItem;
import com.microsoft.azure.functions.ExecutionContext;
import lombok.SneakyThrows;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import reactor.core.CoreSubscriber;
import reactor.core.Fuseable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.function.Supplier;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.*;
class BlobstoreConnectorListFilesTest {
private final BlobContainerClient blobContainerClientMock = mock(BlobContainerClient.class);
private final ExecutionContext context = mock(ExecutionContext.class);
private final String id1 = UUID.randomUUID().toString();
private final String id2 = UUID.randomUUID().toString();
@BeforeEach
void setUp() {
BlobItem item1 = mock(BlobItem.class);
when(item1.getName()).thenReturn(id1 + ".xml");
BlobItem item2 = mock(BlobItem.class);
when(item2.getName()).thenReturn(id2 + ".xml");
List<BlobItem> arrayList = new ArrayList<>();
arrayList.add(item1);
arrayList.add(item2);
Mono<PagedResponse<BlobItem>> monoSource = new Mono<>() {
private final Page<BlobItem> page = new Page<>() {
@Override
public IterableStream<BlobItem> getElements() {
return new IterableStream<>(Flux.fromIterable(arrayList));
}
@Override
public String getContinuationToken() {
return null;
}
};
final PagedResponseBase<String, BlobItem> pagedResponseBase = new PagedResponseBase<>(null, 200, null, page
, null);
final Fuseable.QueueSubscription<BlobItem> fuseableQueueSubscription = new Fuseable.QueueSubscription<>() {
@Override
public void request(long l) {
}
@SneakyThrows
@Override
public void cancel() {
throw new InterruptedException();
}
@Override
public int size() {
return arrayList.size();
}
@Override
public boolean isEmpty() {
return arrayList.isEmpty();
}
@Override
public void clear() {
arrayList.clear();
}
@Override
public BlobItem poll() {
var value = arrayList.stream().findFirst().orElse(null);
if(value!=null){
arrayList.remove(value);
}
return value;
}
@Override
public int requestFusion(int i) {
return 0;
}
};
@Override
public void subscribe(CoreSubscriber<? super PagedResponse<BlobItem>> coreSubscriber) {
coreSubscriber.onNext(pagedResponseBase);
coreSubscriber.onSubscribe(fuseableQueueSubscription);
}
};
Supplier<Mono<PagedResponse<BlobItem>>> blobItemSupplier = () -> monoSource;
PagedFlux<BlobItem> pagedFlux = new PagedFlux<>(blobItemSupplier);
PagedIterable<BlobItem> leaflets = new PagedIterable<>(pagedFlux);
doReturn(leaflets).when(blobContainerClientMock).listBlobs();
}
@Test
void getAllFiles() {
BlobstoreConnector connector = new BlobstoreConnector(blobContainerClientMock);
List<String> actual = connector.getFiles(context);
assertEquals(2, actual.size());
assertTrue(actual.stream().anyMatch(fileName -> fileName.equals(id1 + ".xml")));
assertTrue(actual.stream().anyMatch(fileName -> fileName.equals(id2 + ".xml")));
}
}
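Not an authoritative answer, just a sketch of how the setup could be simplified: the hand-rolled Mono above signals onNext before onSubscribe and never calls onComplete, which is a likely reason the iteration never terminates. Assuming the azure-core version in use has the PagedResponseBase constructor that takes the item list and a continuation token directly, the whole setUp() can be reduced to a single finite page:
@BeforeEach
void setUp() {
    BlobItem item1 = mock(BlobItem.class);
    when(item1.getName()).thenReturn(id1 + ".xml");
    BlobItem item2 = mock(BlobItem.class);
    when(item2.getName()).thenReturn(id2 + ".xml");
    List<BlobItem> items = new ArrayList<>();
    items.add(item1);
    items.add(item2);
    // One finite page built straight from the list; Mono.just(...) completes,
    // so the PagedIterable stops after this page instead of looping forever.
    PagedResponse<BlobItem> singlePage = new PagedResponseBase<>(null, 200, null, items, null, null);
    PagedFlux<BlobItem> pagedFlux = new PagedFlux<>(() -> Mono.just(singlePage));
    doReturn(new PagedIterable<>(pagedFlux)).when(blobContainerClientMock).listBlobs();
}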
I am writing data to Accumulo storage natively using the GeoMesa native client. Here is my Java code:
package org.locationtech.geomesa.api;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.security.Authorizations;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.feature.AttributeTypeBuilder;
import org.geotools.geometry.jts.JTSFactoryFinder;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore;
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex;
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex$;
import org.locationtech.geomesa.utils.index.IndexMode$;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.filter.FilterFactory2;
import javax.annotation.Nullable;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
public class WorkerBeta {
public static void main(String[] args){
try {
DomainObjectValueSerializer dovs = new DomainObjectValueSerializer();
final GeoMesaIndex<DomainObject> index = AccumuloGeoMesaIndex.buildWithView(
"aj_v14",
"localhost:2181",
"hps",
"root", "9869547580",
false,
dovs,
new SimpleFeatureView<DomainObject>() {
AttributeTypeBuilder atb = new AttributeTypeBuilder();
private List<AttributeDescriptor> attributeDescriptors =
Lists.newArrayList(atb.binding(Integer.class).buildDescriptor("rId")
, atb.binding(String.class).buildDescriptor("dId")
, atb.binding(Integer.class).buildDescriptor("s")
, atb.binding(Integer.class).buildDescriptor("a")
, atb.binding(Integer.class).buildDescriptor("e")
);
@Override
public void populate(SimpleFeature f, DomainObject domainObject, String id, byte[] payload, Geometry geom, Date dtg) {
f.setAttribute("rId", domainObject.rideId);
f.setAttribute("dId", domainObject.deviceId);
f.setAttribute("s", domainObject.speed);
f.setAttribute("a", domainObject.angle);
f.setAttribute("e", domainObject.error);
}
@Override
public List<AttributeDescriptor> getExtraAttributes() {
return attributeDescriptors;
}
}
);
//Inserting
final DomainObject one = new DomainObject(1, "AJJASsP", 12, 40, 1);
final GeometryFactory gf = JTSFactoryFinder.getGeometryFactory();
System.out.println(index.insert(
one,
gf.createPoint(new Coordinate(-74.0, 34.0)),
date("2017-03-31T01:15:00.000Z")
));
//Read
GeoMesaQuery q = GeoMesaQuery.GeoMesaQueryBuilder.builder()
.within(-90.0, -180, 90, 180)
.during(date("2017-01-01T00:00:00.000Z"), date("2017-04-01T00:00:00.000Z"))
.build();
Iterable<DomainObject> results = index.query(q);
int counter = 0;
for(DomainObject dm : results){
counter += 1;
System.out.println("result counter: " + counter);
dovs.toBytes(dm);
}
}
catch (Exception ex){
ex.printStackTrace();
}
index.close();
}
public static class DomainObject {
public final int rideId;
public final String deviceId;
public final int angle;
public final int speed;
public final int error;
public DomainObject(int rideId, String deviceId, int angle, int speed, int error) {
this.rideId = rideId;
this.deviceId = deviceId;
this.angle = angle;
this.speed = speed;
this.error = error;
}
}
public static class DomainObjectValueSerializer implements ValueSerializer<DomainObject> {
public static final Gson gson = new Gson();
@Override
public byte[] toBytes(DomainObject o) {
return gson.toJson(o).getBytes();
}
@Override
public DomainObject fromBytes(byte[] bytes) {
return gson.fromJson(new String(bytes), DomainObject.class);
}
}
public static Date date(String s) {
return Date.from(ZonedDateTime.parse(s).toInstant());
}
}
The problem with this code is that I need to create the index object for every new insert request and call index.close() for the write to take effect, but I can't execute insert() again once index.close() has been called. However, I will be accepting insert requests from a queue at a very high rate, and I don't want to create the index object every time. How can I do that?
In short: how can I flush writes without calling close()?
I created a GeoMesa client class to use GeoMesa natively. Below is a partial implementation that shows how you can flush with AccumuloAppendFeatureWriter without calling close().
public class GeomesaClient {
private AccumuloDataStore ds = null;
private AccumuloAppendFeatureWriter fw = null;
private SimpleFeatureSource sfs = null;
private String tableName = "";
private FeatureStore fst = null;
private SimpleFeatureType sft;
public GeomesaClient(Map<String, String> dsConf) throws Exception {
this.ds = (AccumuloDataStore) DataStoreFinder.getDataStore(dsConf);
this.tableName = dsConf.get("tableName");
sft = createFeatureType();
if(!Arrays.asList(this.ds.getTypeNames()).contains(sft.getTypeName())){
ds.createSchema(sft);
}
this.fw = (AccumuloAppendFeatureWriter) (this.ds.getFeatureWriterAppend(sft.getTypeName(),
Transaction.AUTO_COMMIT));
this.sfs = ds.getFeatureSource(sft.getTypeName());
// obtain the feature source before exposing it as a FeatureStore
this.fst = (FeatureStore) sfs;
}
/*
Flush with AccumuloAppendFeatureWriter
*/
public void flush(boolean force) {
fw.flush();
}
}
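For completeness, a write path on top of that cached writer could look roughly like the method below. The write() helper is hypothetical (it is not in the original snippet); the point is only that the same AccumuloAppendFeatureWriter is reused and flushed, instead of being closed and recreated for every insert.
/*
 Hypothetical helper for GeomesaClient: appends one feature through the
 long-lived writer and flushes so the data becomes readable without close().
*/
public void write(SimpleFeature feature) throws IOException {
    SimpleFeature toWrite = fw.next();               // blank feature from the append writer
    toWrite.setAttributes(feature.getAttributes());  // copy attribute values
    toWrite.setDefaultGeometry(feature.getDefaultGeometry());
    fw.write();                                      // stage the feature
    flush(true);                                     // push it to Accumulo, writer stays open
}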
I am using this Xtext code to create a demo home automation language (GitHub source), but when the code is generated automatically I am getting errors in the file AbstractHomeAutomationRuntimeModule, like "type mismatch: cannot convert from Class to Class". I'm adding a pic for further reference.
I got this error after adding the file RulesGenerator.xtend:
/*
* generated by Xtext 2.10.0
*/
package org.xtext.example.home.generator
import org.eclipse.emf.ecore.resource.Resource
import org.eclipse.xtext.generator.AbstractGenerator
import org.eclipse.xtext.generator.IFileSystemAccess2
import org.eclipse.xtext.generator.IGeneratorContext
import org.xtext.example.home.homeAutomation.Device
import org.xtext.example.home.homeAutomation.Model
import java.util.Scanner
import org.eclipse.emf.ecore.resource.Resource
import org.eclipse.xtext.generator.IFileSystemAccess
import org.eclipse.xtext.generator.IGenerator
import org.eclipse.xtend.lib.macro.declaration.Declaration
import org.xtext.example.home.homeAutomation.Rule
/**
 * Generates code from your model files on save.
 * See https://www.eclipse.org/Xtext/documentation/303_runtime_concepts.html#code-generation
 */
class RulesGenerator implements IGenerator {
override void doGenerate(Resource resource, IFileSystemAccess fsa) {
val simpleClassName = resource.getURI.trimFileExtension.lastSegment
if (resource.contents?.head == null) {
return;
}
val declarations = resource.contents.head.eContents.filter(Declaration)
fsa.generateFile(simpleClassName + '.java', '''
public class «simpleClassName» {
public static void fire(String event) {
«FOR device : declarations.filter(Device)»
«FOR state : device.states»
if (event.equals("«state.name»")) {
System.out.println("«device.name» is now «state.name»!");
}
«ENDFOR»
«ENDFOR»
«FOR rule : declarations.filter(Rule)»
if (event.equals("«rule.when.name»")) {
fire("«rule.then.name»");
}
«ENDFOR»
}
public static void main(String... args) {
try («Scanner.name» scanner = new «Scanner.name»(System.in)) {
System.out.println("Welcome home!");
System.out.println("Available commands : ");
«FOR device : declarations.filter(Device)»
«FOR state : device.states»
System.out.println(" «device.name» «state.name»" );
«ENDFOR»
«ENDFOR»
System.out.println("Have fun!");
while(true) {
String command = scanner.next();
«FOR device : declarations.filter(Device)»
if (command.equalsIgnoreCase("«device.name»")) {
String secondaryCommand = scanner.next();
«FOR state : device.states»
if (secondaryCommand.equalsIgnoreCase("«state.name»")) {
fire("«state.name»");
} else
«ENDFOR»
{
System.out.println("«device.name» can only have the following states: «device.states.map[name].
join(',')».");
}
}
«ENDFOR»
if (command.equalsIgnoreCase("bye")) {
System.out.println("Ciao!");
break;
}
}
}
}
}
''')
}
def ruleMethodName(Rule device) {
'execute' + device.description.replaceAll('\\W', '_')
}
}
The error is in this auto-generated file, AbstractHomeAutomationRuntimeModule:
/*
* generated by Xtext 2.10.0
*/
package org.xtext.example.home;
import com.google.inject.Binder;
import com.google.inject.Provider;
import com.google.inject.name.Names;
import java.util.Properties;
import org.eclipse.xtext.Constants;
import org.eclipse.xtext.IGrammarAccess;
import org.eclipse.xtext.generator.IGenerator2;
import org.eclipse.xtext.naming.DefaultDeclarativeQualifiedNameProvider;
import org.eclipse.xtext.naming.IQualifiedNameProvider;
import org.eclipse.xtext.parser.IParser;
import org.eclipse.xtext.parser.ITokenToStringConverter;
import org.eclipse.xtext.parser.antlr.AntlrTokenDefProvider;
import org.eclipse.xtext.parser.antlr.AntlrTokenToStringConverter;
import org.eclipse.xtext.parser.antlr.IAntlrTokenFileProvider;
import org.eclipse.xtext.parser.antlr.ITokenDefProvider;
import org.eclipse.xtext.parser.antlr.Lexer;
import org.eclipse.xtext.parser.antlr.LexerBindings;
import org.eclipse.xtext.parser.antlr.LexerProvider;
import org.eclipse.xtext.resource.IContainer;
import org.eclipse.xtext.resource.IResourceDescriptions;
import org.eclipse.xtext.resource.containers.IAllContainersState;
import org.eclipse.xtext.resource.containers.ResourceSetBasedAllContainersStateProvider;
import org.eclipse.xtext.resource.containers.StateBasedContainerManager;
import org.eclipse.xtext.resource.impl.ResourceDescriptionsProvider;
import org.eclipse.xtext.resource.impl.ResourceSetBasedResourceDescriptions;
import org.eclipse.xtext.scoping.IGlobalScopeProvider;
import org.eclipse.xtext.scoping.IScopeProvider;
import org.eclipse.xtext.scoping.IgnoreCaseLinking;
import org.eclipse.xtext.scoping.impl.AbstractDeclarativeScopeProvider;
import org.eclipse.xtext.scoping.impl.DefaultGlobalScopeProvider;
import org.eclipse.xtext.scoping.impl.ImportedNamespaceAwareLocalScopeProvider;
import org.eclipse.xtext.serializer.ISerializer;
import org.eclipse.xtext.serializer.impl.Serializer;
import org.eclipse.xtext.serializer.sequencer.ISemanticSequencer;
import org.eclipse.xtext.serializer.sequencer.ISyntacticSequencer;
import org.eclipse.xtext.service.DefaultRuntimeModule;
import org.eclipse.xtext.service.SingletonBinding;
import org.xtext.example.home.generator.RulesGenerator;
import org.xtext.example.home.parser.antlr.HomeAutomationAntlrTokenFileProvider;
import org.xtext.example.home.parser.antlr.HomeAutomationParser;
import org.xtext.example.home.parser.antlr.internal.InternalHomeAutomationLexer;
import org.xtext.example.home.scoping.HomeAutomationScopeProvider;
import org.xtext.example.home.serializer.HomeAutomationSemanticSequencer;
import org.xtext.example.home.serializer.HomeAutomationSyntacticSequencer;
import org.xtext.example.home.services.HomeAutomationGrammarAccess;
import org.xtext.example.home.validation.HomeAutomationValidator;
/**
* Manual modifications go to {@link HomeAutomationRuntimeModule}.
*/
@SuppressWarnings("all")
public abstract class AbstractHomeAutomationRuntimeModule extends DefaultRuntimeModule {
protected Properties properties = null;
@Override
public void configure(Binder binder) {
properties = tryBindProperties(binder, "org/xtext/example/home/HomeAutomation.properties");
super.configure(binder);
}
public void configureLanguageName(Binder binder) {
binder.bind(String.class).annotatedWith(Names.named(Constants.LANGUAGE_NAME)).toInstance("org.xtext.example.home.HomeAutomation");
}
public void configureFileExtensions(Binder binder) {
if (properties == null || properties.getProperty(Constants.FILE_EXTENSIONS) == null)
binder.bind(String.class).annotatedWith(Names.named(Constants.FILE_EXTENSIONS)).toInstance("home");
}
// contributed by org.eclipse.xtext.xtext.generator.grammarAccess.GrammarAccessFragment2
public ClassLoader bindClassLoaderToInstance() {
return getClass().getClassLoader();
}
// contributed by org.eclipse.xtext.xtext.generator.grammarAccess.GrammarAccessFragment2
public Class<? extends IGrammarAccess> bindIGrammarAccess() {
return HomeAutomationGrammarAccess.class;
}
// contributed by org.eclipse.xtext.xtext.generator.serializer.SerializerFragment2
public Class<? extends ISemanticSequencer> bindISemanticSequencer() {
return HomeAutomationSemanticSequencer.class;
}
// contributed by org.eclipse.xtext.xtext.generator.serializer.SerializerFragment2
public Class<? extends ISyntacticSequencer> bindISyntacticSequencer() {
return HomeAutomationSyntacticSequencer.class;
}
// contributed by org.eclipse.xtext.xtext.generator.serializer.SerializerFragment2
public Class<? extends ISerializer> bindISerializer() {
return Serializer.class;
}
// contributed by org.eclipse.xtext.xtext.generator.parser.antlr.XtextAntlrGeneratorFragment2
public Class<? extends IParser> bindIParser() {
return HomeAutomationParser.class;
}
// contributed by org.eclipse.xtext.xtext.generator.parser.antlr.XtextAntlrGeneratorFragment2
public Class<? extends ITokenToStringConverter> bindITokenToStringConverter() {
return AntlrTokenToStringConverter.class;
}
// contributed by org.eclipse.xtext.xtext.generator.parser.antlr.XtextAntlrGeneratorFragment2
public Class<? extends IAntlrTokenFileProvider> bindIAntlrTokenFileProvider() {
return HomeAutomationAntlrTokenFileProvider.class;
}
// contributed by org.eclipse.xtext.xtext.generator.parser.antlr.XtextAntlrGeneratorFragment2
public Class<? extends Lexer> bindLexer() {
return InternalHomeAutomationLexer.class;
}
// contributed by org.eclipse.xtext.xtext.generator.parser.antlr.XtextAntlrGeneratorFragment2
public Class<? extends ITokenDefProvider> bindITokenDefProvider() {
return AntlrTokenDefProvider.class;
}
// contributed by org.eclipse.xtext.xtext.generator.parser.antlr.XtextAntlrGeneratorFragment2
public Provider<InternalHomeAutomationLexer> provideInternalHomeAutomationLexer() {
return LexerProvider.create(InternalHomeAutomationLexer.class);
}
// contributed by org.eclipse.xtext.xtext.generator.parser.antlr.XtextAntlrGeneratorFragment2
public void configureRuntimeLexer(Binder binder) {
binder.bind(Lexer.class)
.annotatedWith(Names.named(LexerBindings.RUNTIME))
.to(InternalHomeAutomationLexer.class);
}
// contributed by org.eclipse.xtext.xtext.generator.validation.ValidatorFragment2
@SingletonBinding(eager=true)
public Class<? extends HomeAutomationValidator> bindHomeAutomationValidator() {
return HomeAutomationValidator.class;
}
// contributed by org.eclipse.xtext.xtext.generator.scoping.ImportNamespacesScopingFragment2
public Class<? extends IScopeProvider> bindIScopeProvider() {
return HomeAutomationScopeProvider.class;
}
// contributed by org.eclipse.xtext.xtext.generator.scoping.ImportNamespacesScopingFragment2
public void configureIScopeProviderDelegate(Binder binder) {
binder.bind(IScopeProvider.class).annotatedWith(Names.named(AbstractDeclarativeScopeProvider.NAMED_DELEGATE)).to(ImportedNamespaceAwareLocalScopeProvider.class);
}
// contributed by org.eclipse.xtext.xtext.generator.scoping.ImportNamespacesScopingFragment2
public Class<? extends IGlobalScopeProvider> bindIGlobalScopeProvider() {
return DefaultGlobalScopeProvider.class;
}
// contributed by org.eclipse.xtext.xtext.generator.scoping.ImportNamespacesScopingFragment2
public void configureIgnoreCaseLinking(Binder binder) {
binder.bindConstant().annotatedWith(IgnoreCaseLinking.class).to(false);
}
// contributed by org.eclipse.xtext.xtext.generator.exporting.QualifiedNamesFragment2
public Class<? extends IQualifiedNameProvider> bindIQualifiedNameProvider() {
return DefaultDeclarativeQualifiedNameProvider.class;
}
// contributed by org.eclipse.xtext.xtext.generator.builder.BuilderIntegrationFragment2
public Class<? extends IContainer.Manager> bindIContainer$Manager() {
return StateBasedContainerManager.class;
}
// contributed by org.eclipse.xtext.xtext.generator.builder.BuilderIntegrationFragment2
public Class<? extends IAllContainersState.Provider> bindIAllContainersState$Provider() {
return ResourceSetBasedAllContainersStateProvider.class;
}
// contributed by org.eclipse.xtext.xtext.generator.builder.BuilderIntegrationFragment2
public void configureIResourceDescriptions(Binder binder) {
binder.bind(IResourceDescriptions.class).to(ResourceSetBasedResourceDescriptions.class);
}
// contributed by org.eclipse.xtext.xtext.generator.builder.BuilderIntegrationFragment2
public void configureIResourceDescriptionsPersisted(Binder binder) {
binder.bind(IResourceDescriptions.class).annotatedWith(Names.named(ResourceDescriptionsProvider.PERSISTED_DESCRIPTIONS)).to(ResourceSetBasedResourceDescriptions.class);
}
// contributed by org.eclipse.xtext.xtext.generator.generator.GeneratorFragment2
public Class<? extends IGenerator2> bindIGenerator2() {
return RulesGenerator.class;
}
}
The error says that RulesGenerator should implement the IGenerator2 interface (not IGenerator).
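In other words, the generated module binds IGenerator2, so the generator has to satisfy that contract, for example by extending AbstractGenerator (already imported in the file above) instead of implementing the old IGenerator. For reference, this is the Java shape of the interface the binding expects; the skeleton below only illustrates the required signatures, it is not the actual generator.
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.xtext.generator.IFileSystemAccess2;
import org.eclipse.xtext.generator.IGenerator2;
import org.eclipse.xtext.generator.IGeneratorContext;
// Illustration only: in the Xtend RulesGenerator the same effect is achieved by
// "class RulesGenerator extends AbstractGenerator" and overriding
// doGenerate(Resource, IFileSystemAccess2, IGeneratorContext).
public class RulesGeneratorSkeleton implements IGenerator2 {
    @Override
    public void beforeGenerate(Resource input, IFileSystemAccess2 fsa, IGeneratorContext context) {
        // optional pre-generation hook
    }
    @Override
    public void doGenerate(Resource input, IFileSystemAccess2 fsa, IGeneratorContext context) {
        // emit files here, e.g. fsa.generateFile(fileName, contents)
    }
    @Override
    public void afterGenerate(Resource input, IFileSystemAccess2 fsa, IGeneratorContext context) {
        // optional post-generation hook
    }
}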
I had a similar issue, but my DslGenerator extended AbstractGenerator, which implements IGenerator2. Everything looked good, but it still complained about a type mismatch between my generator and IGenerator2 in AbstractDslRuntimeModule.
I had VS Code open with the source code in the background, and apparently something was blocking or interfering when generating the Xtext artifacts. After closing VS Code, cleaning, and regenerating the Xtext artifacts, it built correctly.
I need to set up JMock code to test a callback with Google protobuf.
The full project is located at http://github.com/andrewmilkowski/template-transport
In short, the method signatures are below.
What I need to do is test the method getLongValue using JMock's JUnit4Mockery.
What is the best and cleanest way to go about this?
Thanks much!
package com.argmaps.client.proto;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.fepss.rpc.server.RpcApplication;
import com.fepss.rpc.client.RpcChannelImpl;
import org.apache.tapestry5.ioc.MappedConfiguration;
import com.google.protobuf.RpcController;
import com.google.protobuf.RpcCallback;
import com.argmaps.transport.proto.generated.TransportServer.ProtoService;
import com.argmaps.transport.proto.generated.TransportServer.ProtoService.Stub;
import com.argmaps.transport.proto.generated.TransportServer.DefaultLongValue;
import com.argmaps.transport.proto.generated.TransportServer.LongValue;
import com.argmaps.transport.proto.fepss.ProtoServer.TransportHandler;
public class TransportClient {
protected final Log LOG = LogFactory.getLog(this.getClass().getName());
private RpcController controller;
private TransportHandler transportHandler;
private ProtoService.Interface service;
private void open(String host, int port) {
RpcChannelImpl channel = new RpcChannelImpl(host, port);
controller = channel.newRpcController();
service = ProtoService.newStub(channel);
}
protected static class LongValueRpcCallback implements RpcCallback<LongValue> {
private long longValue = 0L;
@Override
public void run(LongValue result) {
longValue = result.getLongValue();
}
private long getLongValue() {
return longValue;
}
}
private void close() {
}
public long getLongValue(LongValueRpcCallback longValueRpcCallback) {
DefaultLongValue defaultLongValue = DefaultLongValue.newBuilder().setLongValue(0L).build();
service.getLongValue(controller, defaultLongValue, longValueRpcCallback);
if (LOG.isDebugEnabled()) {
LOG.debug("Long value from server:" + longValueRpcCallback.getLongValue());
}
return longValueRpcCallback.getLongValue();
}
public static void main(String[] args) {
String host = "localhost";
int port = 9090;
final String portArgKey = "--port=";
for (String cmd : args) {
if (cmd.startsWith(portArgKey)) {
port = Integer.parseInt(cmd.substring(portArgKey.length()));
break;
}
}
TransportClient c = new TransportClient();
c.open(host, port);
c.getLongValue(new LongValueRpcCallback());
c.close();
}
public TransportClient() {
}
public static class TransportModule {
public static void contributeIoHandler(MappedConfiguration<String, ProtoService> configruation) {
configruation.add(ProtoService.getDescriptor().getFullName(), new TransportHandler());
}
}
}
Because of the callback, I needed to:
create an abstract class LongValueRpcCallbackTemplate that implements RpcCallback<LongValue>
create a class LongValueRpcCallback that extends LongValueRpcCallbackTemplate
and then complete the implementation in the test class (a sketch of the template is shown below)
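A hypothetical reconstruction of that template (the original class is not shown in the post) could look like this; it keeps the RpcCallback plumbing and exposes the captured value through an overridable getter, which is what lets the test below substitute a canned value.
package com.argmaps.client.proto;
import com.argmaps.transport.proto.generated.TransportServer.LongValue;
import com.google.protobuf.RpcCallback;
// Sketch only: stores the value delivered by the RPC callback and lets
// subclasses (such as the test's TestLongValueRpcCallback) override the getter.
public abstract class LongValueRpcCallbackTemplate implements RpcCallback<LongValue> {
    private long longValue = 0L;
    @Override
    public void run(LongValue result) {
        longValue = result.getLongValue();
    }
    protected long getLongValue() {
        return longValue;
    }
}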
Test class:
package com.argmaps.client.proto;
import com.argmaps.transport.proto.generated.TransportServer;
import com.fepss.rpc.client.RpcChannelImpl;
import com.google.protobuf.RpcController;
import org.jmock.Expectations;
import org.junit.Test;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JUnit4Mockery;
import static org.junit.Assert.assertEquals;
public class TransportClientTest {
Mockery context;
@Before
public void before() {
context = new JUnit4Mockery();
}
private class TestLongValueRpcCallback extends LongValueRpcCallbackTemplate {
private long longValue = 123456789L;
@Override
protected long getLongValue() {
return longValue;
}
}
@Test
public void testGetLongValue() {
final TransportServer.ProtoService.Interface mockedTransportServer = context.mock(TransportServer.ProtoService.Interface.class);
final RpcChannelImpl channel = new RpcChannelImpl("localhost", 9090);
final RpcController controller = channel.newRpcController();
final TransportServer.DefaultLongValue defaultLongValue = TransportServer.DefaultLongValue.newBuilder().setLongValue(0L).build();
com.argmaps.client.proto.TransportClient testObject = new TransportClient(controller, mockedTransportServer);
final TestLongValueRpcCallback testLongValueRpcCallback = new TestLongValueRpcCallback();
final long testLongValue = 123456789L;
context.checking(new Expectations() {
{
one(mockedTransportServer).getLongValue(controller, defaultLongValue, testLongValueRpcCallback);
}
});
assertEquals(testLongValue, testObject.getLongValue(testLongValueRpcCallback));
}
}
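For reference, the test constructs TransportClient through a two-argument constructor that is not shown in the question; a hypothetical version of it simply injects the collaborators that open() would otherwise create, so the mocked ProtoService.Interface can be substituted.
// Hypothetical injection constructor for TransportClient (assumed by the test above).
public TransportClient(RpcController controller, ProtoService.Interface service) {
    this.controller = controller;
    this.service = service;
}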