validate webhook using java Event.validateReceivedEvent always fails signature validation - java
I prepared a servlet in my web site to be notified from PayPal webhook. The development version of the servlet logs the http headers and the body. Here is a screen capture with one example:
I've created a "self contained test application" that shows the problem.
package com.rsws.renew;
import java.io.InputStream;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.SignatureException;
import java.util.HashMap;
import java.util.Map;
import com.paypal.api.payments.Event;
import com.paypal.base.Constants;
import com.paypal.base.rest.APIContext;
import com.paypal.base.rest.PayPalRESTException;
import com.paypal.base.rest.PayPalResource;
/**
* @author Ignacio
*
*/
public class TestWebHook {
public static void main(String[] argv) {
try {
InputStream is = InvoicePaid.class
.getResourceAsStream("/sdk_config.properties");
try {
PayPalResource.initConfig(is);
} catch (PayPalRESTException e) {
e.printStackTrace();
}
APIContext apiContext = new APIContext();
Map<String, String> map = new HashMap<>(PayPalResource.getConfigurations());
apiContext.setConfigurationMap(map);
Map<String,String> headers = new HashMap<String,String>();
// this is the data provided by PayPal sandbox
map.put(Constants.PAYPAL_WEBHOOK_ID, "3W2725225F637605K");
String payload = "{\"id\":\"WH-0T490472X6099635W-7LJ29748BW389372K\",\"create_time\":\"2015-09-25T23:14:14Z\",\"resource_type\":\"invoices\",\"event_type\":\"INVOICING.INVOICE.PAID\",\"summary\":\"An invoice was created\",\"resource\":{\"id\":\"INV2-8FSD-3HT6-BRHR-UHYV\",\"number\":\"MM00063\",\"status\":\"PAID\",\"merchant_info\":{\"email\":\"example#outlook.com\",\"first_name\":\"Dennis\",\"last_name\":\"Doctor\",\"business_name\":\"Medical Professional LLC\",\"address\":{\"line1\":\"1234 Main St\",\"line2\":\"Apt 302\",\"city\":\"Portland\",\"state\":\"OR\",\"postal_code\":\"97217\",\"country_code\":\"US\"}},\"billing_info\":[{\"email\":\"example#example.com\",\"business_name\":\"Medical Professionals LLC\",\"language\":\"en_US\"}],\"items\":[{\"name\":\"Sample Item\",\"quantity\":1,\"unit_price\":{\"currency\":\"USD\",\"value\":\"1.00\"},\"unit_of_measure\":\"QUANTITY\"}],\"invoice_date\":\"2015-09-28 PDT\",\"payment_term\":{\"term_type\":\"DUE_ON_RECEIPT\",\"due_date\":\"2015-09-28 PDT\"},\"tax_calculated_after_discount\":true,\"tax_inclusive\":false,\"total_amount\":{\"currency\":\"USD\",\"value\":\"1.00\"},\"payments\":[{\"type\":\"PAYPAL\",\"transaction_id\":\"22592127VV907111U\",\"transaction_type\":\"SALE\",\"method\":\"PAYPAL\",\"date\":\"2015-09-28 14:37:13 PDT\"}],\"metadata\":{\"created_date\":\"2015-09-28 14:35:46 PDT\",\"last_updated_date\":\"2015-09-28 14:37:13 PDT\",\"first_sent_date\":\"2015-09-28 14:35:47 PDT\",\"last_sent_date\":\"2015-09-28 14:35:47 
PDT\"},\"paid_amount\":{\"paypal\":{\"currency\":\"USD\",\"value\":\"1.00\"}},\"links\":[{\"rel\":\"self\",\"href\":\"https://api.paypal.com/v1/invoicing/invoices/INV2-8FSD-3HT6-BRHR-UHYV\",\"method\":\"GET\"}]},\"links\":[{\"href\":\"https://api.paypal.com/v1/notifications/webhooks-events/WH-0T490472X6099635W-7LJ29748BW389372K\",\"rel\":\"self\",\"method\":\"GET\"},{\"href\":\"https://api.paypal.com/v1/notifications/webhooks-events/WH-0T490472X6099635W-7LJ29748BW389372K/resend\",\"rel\":\"resend\",\"method\":\"POST\"}]}";
headers.put("PAYPAL-CERT-URL", "https://api.paypal.com/v1/notifications/certs/CERT-360caa42-fca2a594-df8cd2d5");
headers.put("PAYPAL-TRANSMISSION-ID", "464163d0-e0ae-11e5-af72-51ae350aaff1");
headers.put("PAYPAL-TRANSMISSION-TIME", "2016-03-02T19:38:01Z");
headers.put("PAYPAL-AUTH-ALGO", "SHA256withRSA");
headers.put("PAYPAL-TRANSMISSION-SIG", "S3AjY87GLp1MP/UsGAWPoEes+laa7xbV4X7pMi9PdC0QR7MoNC/L/O2UThAh1IBzDZ5DGXvkEDvXK9fF0IfoS2QtLJUBm5+UFoo1jJMlH+QCiJUEHSuio2UrFGbxoqaIPcA1PN0tmd5FwikDRPCnpht6pvMvCZV1FEQbBMr9ld3d3XoWBKeWQG+oxAWSTNYJiKQIrM6l/8+hKVQ1LZID8dtR3c7y6eFxNFsDQ3WgwChZZ15vpyhDWQ4t08m3PsWFyjvsQmNRyXQyUeAC8xw96sBwGmHsgwKJwbAamVrWicQqQ/tXuUcqx9Y0pg3P4LuGNPFKzktq9L3ZImTEJxpRLA==");
// this shows invalid
System.out.println(Event.validateReceivedEvent(apiContext, headers, payload) ? "valid" : "invalid");
// this is the data provided in the sdk examples https://github.com/paypal/PayPal-Java-SDK/blob/master/rest-api-sdk/src/test/java/com/paypal/base/ValidateCertTest.java
map.put(Constants.PAYPAL_WEBHOOK_ID, "3RN13029J36659323");
payload = "{\"id\":\"WH-2W7266712B616591M-36507203HX6402335\",\"create_time\":\"2015-05-12T18:14:14Z\",\"resource_type\":\"sale\",\"event_type\":\"PAYMENT.SALE.COMPLETED\",\"summary\":\"Payment completed for $ 20.0 USD\",\"resource\":{\"id\":\"7DW85331GX749735N\",\"create_time\":\"2015-05-12T18:13:18Z\",\"update_time\":\"2015-05-12T18:13:36Z\",\"amount\":{\"total\":\"20.00\",\"currency\":\"USD\"},\"payment_mode\":\"INSTANT_TRANSFER\",\"state\":\"completed\",\"protection_eligibility\":\"ELIGIBLE\",\"protection_eligibility_type\":\"ITEM_NOT_RECEIVED_ELIGIBLE,UNAUTHORIZED_PAYMENT_ELIGIBLE\",\"parent_payment\":\"PAY-1A142943SV880364LKVJEFPQ\",\"transaction_fee\":{\"value\":\"0.88\",\"currency\":\"USD\"},\"links\":[{\"href\":\"https://api.sandbox.paypal.com/v1/payments/sale/7DW85331GX749735N\",\"rel\":\"self\",\"method\":\"GET\"},{\"href\":\"https://api.sandbox.paypal.com/v1/payments/sale/7DW85331GX749735N/refund\",\"rel\":\"refund\",\"method\":\"POST\"},{\"href\":\"https://api.sandbox.paypal.com/v1/payments/payment/PAY-1A142943SV880364LKVJEFPQ\",\"rel\":\"parent_payment\",\"method\":\"GET\"}]},\"links\":[{\"href\":\"https://api.sandbox.paypal.com/v1/notifications/webhooks-events/WH-2W7266712B616591M-36507203HX6402335\",\"rel\":\"self\",\"method\":\"GET\"},{\"href\":\"https://api.sandbox.paypal.com/v1/notifications/webhooks-events/WH-2W7266712B616591M-36507203HX6402335/resend\",\"rel\":\"resend\",\"method\":\"POST\"}]}";
headers.put("PAYPAL-CERT-URL", "https://api.sandbox.paypal.com/v1/notifications/certs/CERT-360caa42-fca2a594-a5cafa77");
headers.put("PAYPAL-TRANSMISSION-ID", "b2384410-f8d2-11e4-8bf3-77339302725b");
headers.put("PAYPAL-TRANSMISSION-TIME", "2015-05-12T18:14:14Z");
headers.put("PAYPAL-AUTH-ALGO", "SHA256withRSA");
headers.put("PAYPAL-TRANSMISSION-SIG", "vSOIQFIZQHv8G2vpbOpD/4fSC4/MYhdHyv+AmgJyeJQq6q5avWyHIe/zL6qO5hle192HSqKbYveLoFXGJun2od2zXN3Q45VBXwdX3woXYGaNq532flAtiYin+tQ/0pNwRDsVIufCxa3a8HskaXy+YEfXNnwCSL287esD3HgOHmuAs0mYKQdbR4e8Evk8XOOQaZzGeV7GNXXz19gzzvyHbsbHmDz5VoRl9so5OoHqvnc5RtgjZfG8KA9lXh2MTPSbtdTLQb9ikKYnOGM+FasFMxk5stJisgmxaefpO9Q1qm3rCjaJ29aAOyDNr3Q7WkeN3w4bSXtFMwyRBOF28pJg9g==");
// this shows valid
System.out.println(Event.validateReceivedEvent(apiContext, headers, payload) ? "valid" : "invalid");
} catch (InvalidKeyException e) {
e.printStackTrace();
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
} catch (SignatureException e) {
e.printStackTrace();
} catch (PayPalRESTException e) {
e.printStackTrace();
}
}
}
The code prints "valid" when the data is taken from the SDK examples and "invalid" when the data comes from the PayPal web site.
I wonder why this cannot be validated. Any help is welcome.
You may want to test the validation with actual sandbox transactions and their webhook events. The webhook simulator's mock data may not be kept up to date with the sandbox signing algorithm; the simulator is recommended only for verifying that your script's URL is reachable.
Related
Azure Web Job Upload using java client
History for context: I am trying to run a web job from an HTTP Client. The file is a ZIP file . and contains a java class and bat file to run that java class. This runs okay when i do from POSTMAN. But when i use HTTP client, i get the following error always " '---i-NPsGbTVUpaP0CeJxMQVrHoDHvaxo3' is not recognized as an internal or external command" - Please help – Jagaran yesterday #Jagaran if it only happen from some clients, it is likely unrelated. Please ask a new question – David Ebbo 21 hours ago No any HTTP Client i am using in java, it is the same. it works in CURL or loading from web console. My sample code below – Jagaran 2 hours ago No any HTTP Client i am using in java, it is the same. it works in CURL or loading from web console. Do you have any sample Java based HTTP Client where I can publish Azure Web Job? I have tried all Java REST clients. May be i am doing something wrong. The error I get in Azure console is '---i-NPsGbTVUpaP0CeJxMQVrHoDHvaxo3' is not recognized as an internal or external command, [08/25/2017 09:30:22 > e7f683: ERR ] operable program or batch file.o I feel Content type = applciation /zip is not happening correctly when using java. Please help us. 
Sample Code: import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.apache.http.entity.ContentType; import com.mashape.unirest.http.HttpResponse; import com.mashape.unirest.http.Unirest; /** * #author jagaran.das * */ public class AIPHTTPClient { /** * #param args * #throws IOException */ #SuppressWarnings({ "unused", "rawtypes" }) public static void main(String[] args) throws IOException { try { URI uri = new AIPHTTPClient().getURI(); HttpResponse<InputStream> jsonResponse = Unirest.put("https://<URL>/api/triggeredwebjobs/TestJOb") .basicAuth("$AzureWebJobTestBRMS", "XXXXX") .header("content-disposition","attachement; filename=acvbgth.bat") .field("file", new FileInputStream(new File(uri)) ,ContentType.create("content-type: application/zip"),"AzureWebJob.zip").asBinary(); System.out.println(jsonResponse.getStatusText()); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } public InputStream readZip() { ZipFile zipFile = null; ZipEntry zipEntry = zipFile.getEntry("run.bat"); InputStream stream = null; /* try { zipFile = new ZipFile("/Users/jagaran.das/Documents/work/AIP/AzureWebJob.zip"); java.util.Enumeration<? 
extends ZipEntry> entries = zipFile.entries(); while(entries.hasMoreElements()){ ZipEntry entry = entries.nextElement(); stream = zipFile.getInputStream(entry); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } */ try { stream = zipFile.getInputStream(zipEntry); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } return stream; } public URI getURI() throws MalformedURLException { File file = new File("/Users/jagaran.das/Documents/work/AIP/azure-poc/AzureWebJob.zip"); URI fileUri = file.toURI(); System.out.println("URI:" + fileUri); URL fileUrl = file.toURI().toURL(); System.out.println("URL:" + fileUrl); URL fileUrlWithoutSpecialCharacterHandling = file.toURL(); System.out.println("URL (no special character handling):" + fileUrlWithoutSpecialCharacterHandling); return fileUri; } }
I've been a little too harsh in my answer before really trying stuff out. Apologies. I've now tried out your snippet and looks like you're hitting an issue with Unirest - probably this one. My advice would be to just move to Apache's HTTP library. Here's a working sample: import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.HttpClient; import org.apache.http.client.entity.EntityBuilder; import org.apache.http.client.methods.HttpPut; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.util.EntityUtils; import java.io.File; public class App { public static void main( String[] args ) { File sourceZipFile = new File("webjob.zip"); String kuduApiUrl = "https://yoursitename.scm.azurewebsites.net/api/zip/site/wwwroot/app_data/jobs/triggered/job988/"; HttpEntity httpEntity = EntityBuilder.create() .setFile(sourceZipFile) .build(); CredentialsProvider provider = new BasicCredentialsProvider(); UsernamePasswordCredentials credentials = new UsernamePasswordCredentials( "$yoursitename", "SiteLevelPasSw0rD" ); provider.setCredentials(AuthScope.ANY, credentials); HttpClient client = HttpClientBuilder.create() .setDefaultCredentialsProvider(provider) .build(); HttpPut putRequest = new HttpPut(kuduApiUrl); putRequest.setEntity(httpEntity); // Kudu's Zip API expects application/zip putRequest.setHeader("Content-type", "application/zip"); try { HttpResponse response = client.execute(putRequest); int statusCode = response.getStatusLine().getStatusCode(); HttpEntity entity = response.getEntity(); String resBody = EntityUtils.toString(entity, "UTF-8"); System.out.println(statusCode); System.out.println(resBody); } catch (Exception e) { e.printStackTrace(); } } } That's sending Content-Type: application/zip 
and the raw zip contents in the body (no multipart horse manure). I've probably over-engineered the sample.. but it is what it is. The upload is successful and the WebJob published:
Glad for you that you have solved the issue and I try to provide a workaround for your reference. Deploy WebJob to azure , in addition to using REST API, you can also use the FTP way. Of course, the premise is that you need to know the directory uploaded by webjob via KUDU. I offer you the snippet of code below via FTP4J libiary: import java.io.File; import it.sauronsoftware.ftp4j.FTPClient; public class UploadFileByFTP { private static String hostName = <your host name>; private static String userName = <user name>; private static String password = <password>; public static void main(String[] args) { try { // create client FTPClient client = new FTPClient(); // connect host client.connect(hostName); // log in client.login(userName, password); // print address System.out.println(client); // change directory client.changeDirectory("/site/wwwroot/App_Data/jobs/continuous"); // current directory String dir = client.currentDirectory(); System.out.println(dir); File file = new File("D:/test.zip"); client.upload(file); } catch (Exception e) { e.printStackTrace(); } } } You can follow this tutorial to configure your parameters.
UTGARD opc client: Read multiple items repeatedly
I have written the following code using the Utgard OPC library. I need to read data from an OPC server once every 15 seconds. However, I'm not sure if this is the most optimal way to implement it. In my scenario I require to read upward of 300 tags from the server. Any suggestions? package opcClientSalem; import java.util.concurrent.Executors; import org.jinterop.dcom.common.JIException; //import org.jinterop.dcom.core.JIVariant; import org.openscada.opc.lib.common.ConnectionInformation; import org.openscada.opc.lib.common.NotConnectedException; import org.openscada.opc.lib.da.AccessBase; import org.openscada.opc.lib.da.AddFailedException; import org.openscada.opc.lib.da.AutoReconnectController; import org.openscada.opc.lib.da.DataCallback; import org.openscada.opc.lib.da.DuplicateGroupException; import org.openscada.opc.lib.da.Item; import org.openscada.opc.lib.da.ItemState; import org.openscada.opc.lib.da.Server; import org.openscada.opc.lib.da.SyncAccess; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; public class opcClientSalem { public static void main(String[] args) throws Exception { // create connection information System.out.println("**********Initializing OPC Client**********"); java.util.logging.Logger.getLogger("org.jinterop").setLevel(java.util.logging.Level.OFF); final ConnectionInformation ci = new ConnectionInformation("myusername","mypassword"); ci.setHost("myhost"); ci.setDomain(""); ci.setProgId("Matrikon.OPC.Simulation.1"); ci.setClsid("F8582CF2-88FB-11D0-B850-00C0F0104305"); String itemIdArr[] = {"Random.Real8","Random.Int2"}; // This is where I would have an array of all items // 
create a new server final Server server = new Server(ci, Executors.newSingleThreadScheduledExecutor()); AutoReconnectController controller = new AutoReconnectController(server); try { // connect to server System.out.println("**********Attempting to connect to OPC**********"); controller.connect(); System.out.println("**********Successfully connected to OPC**********"); // add sync access, poll every 15000 ms final AccessBase access = new SyncAccess(server, 15000); while(true){ for(final String str : itemIdArr){ access.addItem(str, new DataCallback() { #Override public void changed(Item item, ItemState state) { // Building a JSON string with value recieved String record = "[ {" +"\""+"name"+"\" :\""+str + "\",\""+"timestamp"+"\" :"+ state.getTimestamp().getTime().getTime()+ ",\""+"value"+"\" : "+value.replace("[", "").replace("]", "") +",\"tags\":{\"test\":\"test1\"}} ]"; try { // Post JSON string to my API which ingests this data new opcClientSalem().restpost(record); } catch (ClientProtocolException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); } // start reading access.bind(); Thread.sleep(5000); } // wait a little bit // stop reading //access.unbind(); } catch (final JIException e) { //System.out.println(String.format("%08X: %s", e.getErrorCode(), server.getErrorMessage(e.getErrorCode()))); } } private void restpost(String record) throws ClientProtocolException, IOException{ HttpClient client = new DefaultHttpClient(); HttpPost post = new HttpPost("http://localhost/myapi/datapoints"); StringEntity input = new StringEntity(record); post.setEntity(input); HttpResponse response = client.execute(post); System.out.println("Post success::"+record); } }
I'm not sure you need to add the items over and over again in your while group. In other libraries (.net or native c++) usually you need to add the items only once, and the callback called whenever the value of the item is changed. In .net or c++ we get a global callback per group, which seems more effective than individual callbacks per items. Maybe SyncAccess has some global callback, look for it. So the possible optimizations: remove the while part, add items only once and sleep the thread infinite. look for global callback for all items
You should create a subscription in this case.
Can we use Google data plugin in scala?
I am new to scala. I am trying to import contacts from gmail in to my application.I can create sample application in java using Eclipse by following link https://developers.google.com/google-apps/contacts/v2/developers_guide_java?csw=1#retrieving_without_query I can Import the contacts in My java application.And It works fine. My java code is import com.google.gdata.client.contacts.ContactsService; import com.google.gdata.data.contacts.ContactEntry; import com.google.gdata.data.contacts.ContactFeed; import com.google.gdata.model.gd.Email; import com.google.gdata.util.AuthenticationException; import com.google.gdata.util.ServiceException; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.List; /** * This is a test template */ public class Contacts { public static void main(String[] args) { try { // Create a new Contacts service System.out.println("hiiii"+args[0]); ContactsService myService = new ContactsService("My Application"); myService.setUserCredentials(args[0],args[1]); // Get a list of all entries URL metafeedUrl = new URL("http://www.google.com/m8/feeds/contacts/"+args[0]+"#gmail.com/base"); System.out.println("Getting Contacts entries...\n"); ContactFeed resultFeed = myService.getFeed(metafeedUrl, ContactFeed.class); List<ContactEntry> entries = resultFeed.getEntries(); for(int i=0; i<entries.size(); i++) { ContactEntry entry = entries.get(i); System.out.println("\t" + entry.getTitle().getPlainText()); System.out.println("\t" + entry.getEmailAddresses()); for(com.google.gdata.data.extensions.Email emi:entry.getEmailAddresses()) System.out.println(emi.getAddress()); } System.out.println("\nTotal Entries: "+entries.size()); } catch(AuthenticationException e) { e.printStackTrace(); System.out.println("Authentication failed"); } catch(MalformedURLException e) { e.printStackTrace(); System.out.println("url"); } catch(ServiceException e) { e.printStackTrace(); System.out.println("Service exc"); } 
catch(IOException e) { e.printStackTrace(); System.out.println("IO exception"); } } } I tried to use same library functions for My Scala but it doesn't work. My Scala code is import com.google.gdata.client.contacts.ContactsService import com.google.gdata.data.contacts.ContactEntry import com.google.gdata.data.contacts.ContactFeed import com.google.gdata.util.ServiceException import com.google.gdata.util.AuthenticationException import java.io.IOException import java.net.URL import java.net.MalformedURLException object Contacts { class Test { def main(args:Array[String]) { println("hiii") try { // Create a new Contacts service //ContactsService myService = new ContactsService("My Application"); //myService.setUserCredentials(args[0],args[1]); val myService= new ContactsService("My App") myService.setUserCredentials("MyemailId","password") val metafeedUrl = new URL("http://www.google.com/m8/feeds/contacts/"+"MyemailId"+"#gmail.com/base") val resultFeed = myService.getFeed(metafeedUrl, classOf[ContactFeed]) //List<ContactEntry> entries = resultFeed.getEntries(); val entries = resultFeed.getEntries(); for(i <-0 to entries.size()) { var entry=entries.get(i) println(entry.getTitle().getPlainText()) } } catch{ case e:AuthenticationException=>{ e.printStackTrace(); } case e:MalformedURLException=>{ e.printStackTrace(); } case e:ServiceException=>{ e.printStackTrace(); } case e:IOException=> { e.printStackTrace(); } } } } } But it does not works. Can I use java library in Scala?
The problem that's causing your error, is that the object Contacts does not have a main method. Instead, it contains an inner class called Test which has a main method. I don't believe that is what you want (in Scala, object methods are the equivalent of Java static methods), so the main method should be moved out into Contacts, and the inner class deleted. Also, for(i <-0 to entries.size()) is probably a mistake. This is roughly equivalent to for(int i=0; i<=entries.size(); i++) (notice the <=). You probably want for(i <-0 until entries.size()). While you're there, you can kill the try..catch blocks if you like, as Scala doesn't use checked exceptions. If you import scala.collection.JavaConversions._, then you can use for (entry <- entries), which may be less error prone. If it still doesn't work (or when posting future questions), provide as much info as you can (error messages, warnings, etc.), as it makes it far more likely that someone will be able to help.
WebScraping with HTML Unit Issue with apache lang3
UPDATE: I ended up using ghost.py but would appreciate a response. I have been using straight java/apache httpd and nio to crawl must pages recently but came across what I expected was a simple issue that actually appears to not be. I am trying to use html unit to crawl a page but every time I run the code below I get the error proceeding the code telling me a jar is missing. Unfortunately, I could not find my answer here as there is a weird part to this question. So, here is the weird part. I have the jar (lang3) it is up to date and it contains a method StringUtils.startsWithIgnoreCase(String string,String prefix) that works. I would really like to avoid selenium as I need to crawl (if sampling tells me properly), about 1000 pages on the same site over several months. Is there a particular version I need? All I saw was the note to update to 3-1 which I have. Is there a method if installation that works? Thanks. The code I am running is: import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import com.gargoylesoftware.htmlunit.BrowserVersion; import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException; import com.gargoylesoftware.htmlunit.Page; import com.gargoylesoftware.htmlunit.RefreshHandler; import com.gargoylesoftware.htmlunit.WebClient; import com.gargoylesoftware.htmlunit.html.HtmlAnchor; import com.gargoylesoftware.htmlunit.html.HtmlForm; import com.gargoylesoftware.htmlunit.html.HtmlPage; import com.gargoylesoftware.htmlunit.html.HtmlTable; import com.gargoylesoftware.htmlunit.html.HtmlTableRow; public class crawl { public crawl() { //TODO Constructor crawl_page(); } public void crawl_page() { //TODO control the crawling WebClient webClient = new WebClient(BrowserVersion.FIREFOX_10); webClient.setRefreshHandler(new RefreshHandler() { public void handleRefresh(Page page, URL url, int arg) throws IOException { System.out.println("handleRefresh"); } }); //the url for CA's Megan's law sex off String 
url="http://www.myurl.com" //not my url HtmlPage page; try { page = (HtmlPage) webClient.getPage(url); HtmlForm form=page.getFormByName("_ctl0"); form.getInputByName("cbAgree").setChecked(true); page=form.getButtonByName("Continue").click(); System.out.println(page.asText()); } catch (FailingHttpStatusCodeException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (MalformedURLException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } The error is: Exception in thread "main" java.lang.NoSuchMethodError: org.apache.commons.lang3.StringUtils.startsWithIgnoreCase(Ljava/lang/CharSequence;Ljava/lang/CharSequence;)Z at com.gargoylesoftware.htmlunit.util.URLCreator$URLCreatorStandard.toUrlUnsafeClassic(URLCreator.java:66) at com.gargoylesoftware.htmlunit.util.UrlUtils.toUrlUnsafe(UrlUtils.java:193) at com.gargoylesoftware.htmlunit.util.UrlUtils.toUrlSafe(UrlUtils.java:171) at com.gargoylesoftware.htmlunit.WebClient.<clinit>(WebClient.java:159) at ca__soc.crawl.crawl_page(crawl.java:34) at ca__soc.crawl.<init>(crawl.java:24) at ca__soc.us_ca_ca_soc.main(us_ca_ca_soc.java:17)
According to documentation Since: 2.4, 3.0 Changed signature from startsWithIgnoreCase(String, String) to startsWithIgnoreCase(CharSequence, CharSequence) so, probably you have two similar jars on your classpath.
Android Phone as Realtime MJPEG Video Server
I'm trying to use my phone as a realtime MJPEG video source. So far, capturing frames and converting them into JPEGs is no big deal. My real issue is sending the multipart response properly. There's tons of documentation about sending multipart responses out there, but the issue with them is that they all expect that all of the images are available at the time the HTTP request comes in (such as would be used for a multi-image upload). In order to stream in realtime, of course, I need to be able to begin to send the multipart response while continually adding jpegs in the body. I'm by no means a HTTP buff, so it's not desirable for me be required to roll my own HTTP response and write directly to a socket. Is there a library out there that supports this kind of behavior? I've scoured the internet for solutions, but I really don't see anything useful out there. Any ideas? Worst case scenario, I'd be willing to look at human-readable documentation of how to write a multipart response by hand, but I'd really just rather use a library if that's possible. Thanks in advance. edit: got it working using the orielly servlet library as per sigmavirus' suggestion. Note that the MJPEG stream is more or less implicitly inferred from the fact that I'm sending a multipart/x-mixed-replace that only has image/jpeg's in it. Check out the comment in my code for a tutorial that shows what jetty libraries you'll need to get this running. Of course, you'll additionally need cos.jar, the Orielly servlet library. 
The code follows: package edu.stevens.arpac.webclient; import java.io.IOException; import java.net.InetAddress; import java.net.NetworkInterface; import java.util.Collections; import java.util.List; import javax.servlet.ServletException; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletRequest; import org.apache.http.conn.util.InetAddressUtils; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.AbstractHandler; import org.eclipse.jetty.server.Request; import com.oreilly.servlet.MultipartResponse; import com.oreilly.servlet.ServletUtils; import android.os.Environment; import android.util.Log; // holla at http://puregeekjoy.blogspot.com/2011/06/running-embedded-jetty-in-android-app.html public class JettyServer extends Thread { private static final String TAG = "JettyServer"; private Server webServer; private Boolean isStarted = false; public JettyServer() { super(); Log.i(TAG, "Initializing server to port 8080"); webServer = new Server(8080); Handler handler = new AbstractHandler() { public void handle(String target, Request request, HttpServletRequest servletRequest, HttpServletResponse servletResponse) throws IOException, ServletException { ServletOutputStream out = servletResponse.getOutputStream(); MultipartResponse multi = new MultipartResponse(servletResponse); Boolean go = true; while( go ) { try { multi.startResponse("image/jpeg"); ServletUtils.returnFile(Environment.getExternalStorageDirectory().getPath() + "/ARPac/twi.jpg", out); multi.endResponse(); } catch(IOException ex) { go = false; Log.i(TAG, "IO Failed with exception " + ex.getMessage()); } } request.setHandled(true); } }; webServer.setHandler(handler); try { webServer.start(); Log.d(TAG, "started Web server # " + getIPAddress()); isStarted = true; } catch (Exception e) { Log.d(TAG, "unexpected exception starting Web server: " + e); } } /** * Get IP 
address from first non-localhost interface * #return address or empty string */ private String getIPAddress() { try { List<NetworkInterface> interfaces = Collections.list(NetworkInterface.getNetworkInterfaces()); for (NetworkInterface intf : interfaces) { List<InetAddress> addrs = Collections.list(intf.getInetAddresses()); for (InetAddress addr : addrs) { if (!addr.isLoopbackAddress()) { String sAddr = addr.getHostAddress().toUpperCase(); if (InetAddressUtils.isIPv4Address(sAddr)) { //Log.d(TAG, "IP address is: " + sAddr); return sAddr; } } } } } catch (Exception ex) { Log.e(TAG, "could not get IP address: " + ex.getMessage()); } // for now eat exceptions Log.e(TAG, "Could not find a non-loopback IPv4 address!"); return ""; } public void teardown() { if( isStarted ) { try { webServer.stop(); isStarted = false; } catch (Exception e) { Log.e(TAG, "Couldn't stop server. Probably was called when server already stopped."); } } } public void run() { } }
Have you seen this? http://www.servlets.com/cos/javadoc/com/oreilly/servlet/MultipartResponse.html It looks like the example sends each part individually and waits a specified time limit before sending the next or receiving an interrupt.