User-side network consumption - Java

Currently I'm working on a speed test CLI, and I have successfully implemented the CLI in Java with Jython. But there's an issue: it always calculates the server-side internet speed, whereas I want it to consume the user-side network resources. Does anybody know the correct way to implement this?
The Java code is as follows:
package com.speedtest.serviceimpl;
import java.io.InputStream;
import java.io.StringWriter;
import javax.servlet.http.HttpServletRequest;
import org.json.JSONException;
import org.json.JSONObject;
import org.python.util.PythonInterpreter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.speedtest.dto.SpeedTestResponse;
import com.speedtest.service.SpeedTestJythonService;
@Service
public class JythonService implements SpeedTestJythonService {
private static final Logger Log = LoggerFactory.getLogger(JythonService.class);
PythonInterpreter pythonInterpreter;
public JythonService() {
pythonInterpreter = new PythonInterpreter();
}
@Override
public SpeedTestResponse execMethodInPyClass(InputStream inputStream,HttpServletRequest request) {
try {
StringWriter stringWriter = new StringWriter();
Double upload_speed = 0.0;
Double download_speed = 0.0;
Double download_bytes = 0.0;
Double upload_bytes = 0.0;
String service_provider_name = "";
pythonInterpreter.setOut(stringWriter);
pythonInterpreter.execfile(inputStream);
System.out.println("Before Response ==> " + stringWriter.toString());
String res = stringWriter.toString().replace("|", "");
System.out.println("After Response ==> " + res);
JSONObject obj = new JSONObject(res);
Double download = obj.getDouble("download");
Double upload = obj.getDouble("upload");
Double ping = obj.getDouble("ping");
Long bytes_sent = obj.getLong("bytes_sent");
Long bytes_received = obj.getLong("bytes_received");
JSONObject json_server = obj.getJSONObject("server");
String name = json_server.getString("name");
String country_name = json_server.getString("country");
JSONObject json_client = obj.getJSONObject("client");
String isp = json_client.getString("isp");
String ip = json_client.getString("ip");
String country_code = json_client.getString("country");
String isprating = json_client.getString("isprating");
Log.info("obj: {}", obj);
Log.info("download: {}", download);
Log.info("upload: {}", upload);
Log.info("ping: {}", ping);
Log.info("bytes_sent: {}", bytes_sent);
Log.info("bytes_received: {}", bytes_received);
download_speed = (download / 1000.0 / 1000.0);
upload_speed = (upload / 1000.0 / 1000.0);
download_bytes = bytes_received.doubleValue();
upload_bytes = bytes_sent.doubleValue();
SpeedTestResponse response = new SpeedTestResponse();
response.setDownload(download_speed);
response.setIspName(isp);
response.setPing(ping);
response.setUpload(upload_speed);
return response;
} catch (JSONException e) {
e.printStackTrace();
throw new RuntimeException(e.getMessage());
}
}
}
The Python code is SpeedTest.py.
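For context, the service is called along these lines (a simplified sketch; the wiring and getter names below are illustrative, not from the original code):
// Illustrative caller (not in the original post): loads SpeedTest.py from the
// classpath and delegates to the JythonService above. The @Autowired wiring
// and the getDownload()/getUpload() getters are assumptions.
@Autowired
private SpeedTestJythonService jythonService;

public void runSpeedTest(HttpServletRequest request) throws Exception {
    try (InputStream script = getClass().getResourceAsStream("/SpeedTest.py")) {
        SpeedTestResponse result = jythonService.execMethodInPyClass(script, request);
        System.out.println("Download (Mbps): " + result.getDownload());
        System.out.println("Upload (Mbps): " + result.getUpload());
    }
}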

Related

Push data from DynamoDB to Elasticsearch using Java

Hi, I have created a handler in Java for getting the events from DynamoDB.
Here is my code:
package com.Lambda.dynamodb;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.LambdaLogger;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent.DynamodbStreamRecord;
public class DDBEventProcessor implements
RequestHandler<DynamodbEvent, String> {
public String handleRequest(DynamodbEvent ddbEvent, Context context) {
for (DynamodbStreamRecord record : ddbEvent.getRecords()){
System.out.println(record.getEventID());
System.out.println(record.getEventName());
System.out.println(record.getDynamodb().toString());
}
return "Successfully processed " + ddbEvent.getRecords().size() + " records.";
}
}
The Lambda function is able to write the events to CloudWatch, but the challenge is that I have to index all the streamed records against the AWS Elasticsearch Service endpoint.
While searching through blogs I found a few code samples in Python and Node.js, but my requirement is to build this Lambda function in Java.
Could anyone please suggest how to achieve this in a Java Lambda function?
Hi, I have included the code below; it may be helpful to someone. It uses DynamoDB Streams to index the documents in Elasticsearch, both inside AWS and outside AWS.
package com.Firstlambda;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.auth.AWS4Signer;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.ItemUtils;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent.DynamodbStreamRecord;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.json.JSONObject;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class HelloWorld implements RequestHandler<DynamodbEvent, String> {
private static String serviceName = "es";
private static String region = "us-east-1";
private static String aesEndpoint = "";
private static String index = "";
private static String type = "_doc";
static final AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
public String handleRequest(DynamodbEvent ddbEvent, Context context) {
for (DynamodbStreamRecord record : ddbEvent.getRecords()) {
System.out.println("EventName : " + record.getEventName());
System.out.println("EventName : " + record.getDynamodb());
//AWS outside
RestHighLevelClient esClient = esClient();
//AWS outside
//AWS Inside
//RestHighLevelClient esClient = esClient(serviceName, region);
//AWS Inside
if (record.getEventName().toLowerCase().equals("insert")) {
String JsonString = getJsonstring(record.getDynamodb().getNewImage());
String JsonUniqueId = GetIdfromJsonString(JsonString);
IndexRequest indexRequest = new IndexRequest(index, type, JsonUniqueId);
indexRequest.source(JsonString, XContentType.JSON);
try {
IndexResponse indexResponse = esClient.index(indexRequest, RequestOptions.DEFAULT);
System.out.println(indexResponse.toString());
return "Successfully processed " + ddbEvent.getRecords().size() + " records.";
} catch (IOException e) {
System.out.println(e.getMessage());
}
} else if (record.getEventName().toLowerCase().equals("modify")) {
String JsonString = getJsonstring(record.getDynamodb().getNewImage());
String JsonUniqueId = GetIdfromJsonString(JsonString);
UpdateRequest request = new UpdateRequest(index, type, JsonUniqueId);
String jsonString = JsonString;
request.doc(jsonString, XContentType.JSON);
try {
UpdateResponse updateResponse = esClient.update(
request, RequestOptions.DEFAULT);
System.out.println(updateResponse.toString());
return "Successfully processed " + ddbEvent.getRecords().size() + " records.";
} catch (IOException e) {
System.out.println(e.getMessage());
}
} else {
System.out.println("remove");
System.out.println("KEYID : " + record.getDynamodb().getKeys().get("ID").getN());
String deletedId = record.getDynamodb().getKeys().get("ID").getN();
DeleteRequest request = new DeleteRequest(index, type, deletedId);
try {
DeleteResponse deleteResponse = esClient.delete(
request, RequestOptions.DEFAULT);
} catch (IOException e) {
System.out.println(e.getMessage());
}
}
}
return "Successfully processed";
}
public String getJsonstring(Map<String, AttributeValue> newIma) {
String json = null;
Map<String, AttributeValue> newImage = newIma;
List<Map<String, AttributeValue>> listOfMaps = new ArrayList<Map<String, AttributeValue>>();
listOfMaps.add(newImage);
List<Item> itemList = ItemUtils.toItemList(listOfMaps);
for (Item item : itemList) {
json = item.toJSON();
}
return json;
}
public String GetIdfromJsonString(String Json) {
JSONObject jsonObj = new JSONObject(Json);
return String.valueOf(jsonObj.getInt("ID"));
}
// Adds the interceptor to the ES REST client
// public static RestHighLevelClient esClient(String serviceName, String region) {
// AWS4Signer signer = new AWS4Signer();
// signer.setServiceName(serviceName);
// signer.setRegionName(region);
// HttpRequestInterceptor interceptor = new AWSRequestSigningApacheInterceptor(serviceName, signer, credentialsProvider);
// return new RestHighLevelClient(RestClient.builder(HttpHost.create(aesEndpoint)).setHttpClientConfigCallback(hacb -> hacb.addInterceptorLast(interceptor)));
// }
public static RestHighLevelClient esClient() {
String host = "d9bc7cbca5ec49ea96a6ea683f70caca.eastus2.azure.elastic-cloud.com";
int port = 9200;
String userName = "elastic";
String password = "L4Nfnle3wxLmV95lffwsf$Ub46hp";
String protocol = "https";
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY,
new UsernamePasswordCredentials(userName, password));
RestClientBuilder builder = RestClient.builder(new HttpHost(host, port, protocol))
.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
RestHighLevelClient client = new RestHighLevelClient(builder);
return client;
}
}
This is just sample code and has to be modified based on your requirements.
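One caveat worth noting: getDynamodb().getNewImage() is only populated when the table's stream is configured to emit new images. A sketch of enabling that with the v1 AWS SDK (the table name below is a placeholder, not from the original post):
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.model.StreamSpecification;
import com.amazonaws.services.dynamodbv2.model.StreamViewType;
import com.amazonaws.services.dynamodbv2.model.UpdateTableRequest;
public class EnableStream {
    public static void main(String[] args) {
        AmazonDynamoDB ddb = AmazonDynamoDBClientBuilder.defaultClient();
        // "my-table" is a placeholder; NEW_AND_OLD_IMAGES makes getNewImage()
        // (and getOldImage()) available on the stream records
        ddb.updateTable(new UpdateTableRequest()
                .withTableName("my-table")
                .withStreamSpecification(new StreamSpecification()
                        .withStreamEnabled(true)
                        .withStreamViewType(StreamViewType.NEW_AND_OLD_IMAGES)));
    }
}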

Find if a point is inside a polygon - JAVA jts / awt / geotools

I have a random number of coordinates for a polygon taken from a shapefile.
-119.00072399999999 35.36158, -118.99903 35.361576, -118.999026 35.362579, -118.999023 35.363482, -118.999019 35.36432, -118.999408 35.364847999999995, -118.999406 35.365564, -118.999402 35.366516, -118.999398 35.367467999999995, -118.999394 35.368438, -118.999256 35.368438, -118.998232 35.368441
I now have to check if a point (33.63705, -112.17563) is inside this polygon.
My concern is that my coordinates don't fit into an int datatype.
Here is what I have tried:
import java.awt.Polygon;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.geotools.data.DataStore;
import org.geotools.data.DataStoreFinder;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.geotools.data.simple.SimpleFeatureSource;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.geometry.jts.JTSFactoryFinder;
import org.geotools.referencing.crs.DefaultGeographicCRS;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Point;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
public class ReadShapeFile {
public static void main(String[] args) {
File file = new File("D:\\shapefile201806\\tl_2018_06_bg.shp");
try {
Map<String, String> connect = new HashMap<String, String>();
connect.put("url", file.toURI().toString());
DataStore dataStore = DataStoreFinder.getDataStore(connect);
String[] typeNames = dataStore.getTypeNames();
String typeName = typeNames[0];
System.out.println("Reading content : " + typeName);
SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName);
SimpleFeatureCollection collection = featureSource.getFeatures();
SimpleFeatureIterator iterator = collection.features();
try {
while (iterator.hasNext()) {
SimpleFeature feature = iterator.next();
String featureString = feature.toString();
List<String> polygonList = new ArrayList<String>();
String polygonCoordinates = StringUtils.substringBetween(featureString, "(((", ")))");
System.out.println(polygonCoordinates);
polygonList = Arrays.asList(polygonCoordinates.split(","));
SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
b.setName("MyFeatureType");
b.setCRS(DefaultGeographicCRS.WGS84);
b.add("location", Point.class);
final SimpleFeatureType TYPE = b.buildFeatureType();
SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(TYPE);
GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory();
SimpleFeature pointFeature = featureBuilder.buildFeature(null);
DefaultFeatureCollection featureCollection = new DefaultFeatureCollection("internal", TYPE);
featureCollection.add(pointFeature);
try {
Polygon polygon = new Polygon();
for (int i = 0; i < polygonList.size(); i++) {
String[] splitAxis = (polygonList.get(i).split("\\s+"));
polygon.addPoint(Integer.valueOf(splitAxis[0]), Integer.valueOf(splitAxis[1]));
}
boolean isInside = polygon.contains(33.63705, -112.17563);
System.out.println(isInside);
} catch (Exception e) {
e.printStackTrace();
}
}
} finally {
iterator.close();
}
} catch (Throwable e) {
}
}
}
I know that converting a double to a String and back to an integer is not going to work anyway.
How can I determine whether a point is inside the polygon for negative decimal values? Please help.
Using your SimpleFeature, you can call getDefaultGeometry and get a Geometry object. Once you cast to a Geometry, there should be a contains method which would take a Point class.
Also, you don't want to use the java.awt.Polygon class. Instead you'll be using org.locationtech.jts Geometry classes.
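A minimal sketch of that approach, reusing the collection and imports from the question plus org.locationtech.jts.geom.Geometry, and assuming the shapefile is in WGS84 lon/lat as in the question:
GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory();
// The question's point is given as (lat, lon) = (33.63705, -112.17563);
// JTS Coordinates are ordered (x, y) = (longitude, latitude)
Point point = geometryFactory.createPoint(new Coordinate(-112.17563, 33.63705));
SimpleFeatureIterator iterator = collection.features();
try {
    while (iterator.hasNext()) {
        SimpleFeature feature = iterator.next();
        // getDefaultGeometry() returns Object; cast it to the JTS Geometry
        Geometry geometry = (Geometry) feature.getDefaultGeometry();
        if (geometry.contains(point)) {
            System.out.println("Point is inside feature " + feature.getID());
        }
    }
} finally {
    iterator.close();
}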

Java: save HTTP POST requests hourly

I'm trying to set up a server on AWS with a simple HTTP server and save each HTTP POST request's headers & payload.
It works locally.
My steps after connection via ssh to the ec2 server:
javac Server.java
sudo nohup java Server
It saves the headers to the log file but not the payload, and it doesn't return a 204 response.
Server.java
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.URLDecoder;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executors;
public class Server {
private static final int PORT = 80;
private static final String FILE_PATH = "/home/ec2-user/logs/";
private static final String UTF8 = "UTF-8";
private static final String DELIMITER = "|||";
private static final String LINE_BREAK = "\n";
private static final String FILE_PREFIX = "dd_MM_YYYY_HH";
private static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat(FILE_PREFIX);
private static final String FILE_TYPE = ".txt";
public static void main(String[] args) {
try {
HttpServer server = HttpServer.create(new InetSocketAddress(PORT), 0);
server.createContext("/", new HttpHandler() {
@Override
public void handle(HttpExchange t) throws IOException {
System.out.println("Req\t" + t.getRemoteAddress());
InputStream initialStream = t.getRequestBody();
byte[] buffer = new byte[initialStream.available()];
initialStream.read(buffer);
File targetFile = new File(FILE_PATH + simpleDateFormat.format(new Date()) + FILE_TYPE);
OutputStream outStream = new FileOutputStream(targetFile, true);
String prefix = LINE_BREAK + t.getRequestHeaders().entrySet().toString() + LINE_BREAK + System.currentTimeMillis() + DELIMITER;
outStream.write(prefix.getBytes());
Map<String, String> queryPairs = new HashMap<>();
String params = new String(buffer);
String[] pairs = params.split("&");
for (String pair : pairs) {
int idx = pair.indexOf("=");
String key = pair.substring(0, idx);
String val = pair.substring(idx + 1);
String decodedKey = URLDecoder.decode(key, UTF8);
String decodeVal = URLDecoder.decode(val, UTF8);
queryPairs.put(decodedKey, decodeVal);
}
outStream.write(queryPairs.toString().getBytes());
t.sendResponseHeaders(204, -1);
t.close();
}
});
server.setExecutor(Executors.newCachedThreadPool());
server.start();
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
e.printStackTrace();
}
}
}
Consider these changes to your handle method. As a starting point, two things are changed:
It reads the complete input and copies that into your file (initialStream.available() might not be the full truth)
catch, log and rethrow IOExceptions (you didn't see your 204 after all)
Consider redirecting your output into files, so you can check what happened on the server later:
sudo nohup java Server > server.log 2> server.err &
If you described the desired target file structure in more detail, we could figure something out there as well, I guess.
@Override
public void handle(HttpExchange t) throws IOException {
try {
System.out.println("Req\t" + t.getRemoteAddress());
InputStream initialStream = t.getRequestBody();
File targetFile = new File(FILE_PATH + simpleDateFormat.format(new Date()) + FILE_TYPE);
OutputStream outStream = new FileOutputStream(targetFile, true);
// This will copy the ENTIRE input stream into your target file
// (IOUtils here is org.apache.commons.io.IOUtils from Apache Commons IO)
IOUtils.copy(initialStream, outStream);
outStream.close();
t.sendResponseHeaders(204, -1);
t.close();
} catch(IOException e) {
e.printStackTrace();
throw e;
}
}
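If you'd rather not add the Commons IO dependency, the JDK can do the same copy; on Java 9+ InputStream.transferTo does it in one call (a sketch, not part of the answer above):
InputStream initialStream = t.getRequestBody();
try (OutputStream outStream = new FileOutputStream(targetFile, true)) {
    // transferTo reads until EOF, so the whole request body is copied,
    // unlike an available()-sized read
    initialStream.transferTo(outStream);
}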

Kryo serialization of a class (task object) in Apache Spark returns null on de-serialization

I am using the Java Spark API to write a test application. I am using a class which doesn't extend the Serializable interface, so to make the application work I am using the Kryo serializer to serialize the class. But the problem I observed while debugging is that during de-serialization the returned class object becomes null, which in turn throws a NullPointerException. It seems to be a closure problem where things are going wrong, but I'm not sure. Since I am new to this kind of serialization, I don't know where to start digging.
Here is the code I am testing:
package org.apache.spark.examples;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
/**
* Spark application to test the Serialization issue in spark
*/
public class Test {
static PrintWriter outputFileWriter;
static FileWriter file;
static JavaSparkContext ssc;
public static void main(String[] args) {
String inputFile = "/home/incubator-spark/examples/src/main/scala/org/apache/spark/examples/InputFile.txt";
String master = "local";
String jobName = "TestSerialization";
String sparkHome = "/home/test/Spark_Installation/spark-0.7.0";
String sparkJar = "/home/test/TestSerializationIssesInSpark/TestSparkSerIssueApp/target/TestSparkSerIssueApp-0.0.1-SNAPSHOT.jar";
SparkConf conf = new SparkConf();
conf.set("spark.closure.serializer","org.apache.spark.serializer.KryoSerializer");
conf.set("spark.kryo.registrator", "org.apache.spark.examples.MyRegistrator");
// create the Spark context
if(master.equals("local")){
ssc = new JavaSparkContext("local", jobName,conf);
//ssc = new JavaSparkContext("local", jobName);
} else {
ssc = new JavaSparkContext(master, jobName, sparkHome, sparkJar);
}
JavaRDD<String> testData = ssc.textFile(inputFile).cache();
final NotSerializableJavaClass notSerializableTestObject= new NotSerializableJavaClass("Hi ");
@SuppressWarnings({ "serial", "unchecked"})
JavaRDD<String> classificationResults = testData.map(
new Function<String, String>() {
@Override
public String call(String inputRecord) throws Exception {
if(!inputRecord.isEmpty()) {
//String[] pointDimensions = inputRecord.split(",");
String result = "";
try {
FileWriter file = new FileWriter("/home/test/TestSerializationIssesInSpark/results/test_result_" + (int) (Math.random() * 100));
PrintWriter outputFile = new PrintWriter(file);
InetAddress ip;
ip = InetAddress.getLocalHost();
outputFile.println("IP of the server: " + ip);
result = notSerializableTestObject.testMethod(inputRecord);
outputFile.println("Result: " + result);
outputFile.flush();
outputFile.close();
file.close();
} catch (UnknownHostException e) {
e.printStackTrace();
}
catch (IOException e1) {
e1.printStackTrace();
}
return result;
} else {
System.out.println("End of elements in the stream.");
String result = "End of elements in the input data";
return result;
}
}
}).cache();
long processedRecords = classificationResults.count();
ssc.stop();
System.out.println("sssssssssss"+processedRecords);
}
}
Here is the KryoRegistrator class
package org.apache.spark.examples;
import org.apache.spark.serializer.KryoRegistrator;
import com.esotericsoftware.kryo.Kryo;
public class MyRegistrator implements KryoRegistrator {
public void registerClasses(Kryo kryo) {
kryo.register(NotSerializableJavaClass.class);
}
}
Here is the class I am serializing:
package org.apache.spark.examples;
public class NotSerializableJavaClass {
public String testVariable;
public NotSerializableJavaClass(String testVariable) {
super();
this.testVariable = testVariable;
}
public String testMethod(String vartoAppend){
return this.testVariable + vartoAppend;
}
}
This is because spark.closure.serializer only supports the Java serializer. See http://spark.apache.org/docs/latest/configuration.html for details on spark.closure.serializer.
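In other words, the object captured by the closure must still be Java-serializable even if Kryo is registered for data serialization. The minimal fix, as a sketch, is to make the captured class implement Serializable:
package org.apache.spark.examples;
import java.io.Serializable;
// Implementing Serializable lets the (Java-only) closure serializer capture
// instances of this class; Kryo can still be registered for RDD data.
public class NotSerializableJavaClass implements Serializable {
    private static final long serialVersionUID = 1L;
    public String testVariable;
    public NotSerializableJavaClass(String testVariable) {
        this.testVariable = testVariable;
    }
    public String testMethod(String vartoAppend) {
        return this.testVariable + vartoAppend;
    }
}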

Simple (Twitter + Streaming API + Java + OAuth) example

In my quest to create a simple Java program to extract tweets from Twitter's streaming API, I have modified this (http://cotdp.com/dl/TwitterConsumer.java) code snippet to work with the OAuth method. The result is the code below, which, when executed, throws a Connection Refused exception.
I am aware of Twitter4J; however, I want to create a program that relies as little as possible on other APIs.
I have done my research and it looks like the oauth.signpost library is suitable for Twitter's streaming API. I have also ensured my authentication details are correct. My Twitter Access level is 'Read-only'.
I couldn't find a simple Java example that shows how to use the streaming API without relying on e.g. Twitter4j.
import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import oauth.signpost.OAuthConsumer;
import oauth.signpost.commonshttp.CommonsHttpOAuthConsumer;
/**
* A hacky little class illustrating how to receive and store Twitter streams
* for later analysis, requires Apache Commons HTTP Client 4+. Stores the data
* in 64MB long JSON files.
*
* Usage:
*
* TwitterConsumer t = new TwitterConsumer("accessToken", "accessSecret",
* "consumerKey", "consumerSecret",
* "http://stream.twitter.com/1/statuses/sample.json", "sample");
* t.start();
*/
public class TwitterConsumer extends Thread {
//
static String STORAGE_DIR = "/tmp";
static long BYTES_PER_FILE = 64 * 1024 * 1024;
//
public long Messages = 0;
public long Bytes = 0;
public long Timestamp = 0;
private String accessToken = "";
private String accessSecret = "";
private String consumerKey = "";
private String consumerSecret = "";
private String feedUrl;
private String filePrefix;
boolean isRunning = true;
File file = null;
FileWriter fw = null;
long bytesWritten = 0;
public static void main(String[] args) {
TwitterConsumer t = new TwitterConsumer(
"XXX",
"XXX",
"XXX",
"XXX",
"http://stream.twitter.com/1/statuses/sample.json", "sample");
t.start();
}
public TwitterConsumer(String accessToken, String accessSecret, String consumerKey, String consumerSecret, String url, String prefix) {
this.accessToken = accessToken;
this.accessSecret = accessSecret;
this.consumerKey = consumerKey;
this.consumerSecret = consumerSecret;
feedUrl = url;
filePrefix = prefix;
Timestamp = System.currentTimeMillis();
}
/**
* @throws IOException
*/
private void rotateFile() throws IOException {
// Handle the existing file
if (fw != null)
fw.close();
// Create the next file
file = new File(STORAGE_DIR, filePrefix + "-"
+ System.currentTimeMillis() + ".json");
bytesWritten = 0;
fw = new FileWriter(file);
System.out.println("Writing to " + file.getAbsolutePath());
}
/**
* @see java.lang.Thread#run()
*/
public void run() {
// Open the initial file
try { rotateFile(); } catch (IOException e) { e.printStackTrace(); return; }
// Run loop
while (isRunning) {
try {
OAuthConsumer consumer = new CommonsHttpOAuthConsumer(consumerKey, consumerSecret);
consumer.setTokenWithSecret(accessToken, accessSecret);
HttpGet request = new HttpGet(feedUrl);
consumer.sign(request);
DefaultHttpClient client = new DefaultHttpClient();
HttpResponse response = client.execute(request);
BufferedReader reader = new BufferedReader(
new InputStreamReader(response.getEntity().getContent()));
while (true) {
String line = reader.readLine();
if (line == null)
break;
if (line.length() > 0) {
if (bytesWritten + line.length() + 1 > BYTES_PER_FILE)
rotateFile();
fw.write(line + "\n");
bytesWritten += line.length() + 1;
Messages++;
Bytes += line.length() + 1;
}
}
} catch (Exception e) {
e.printStackTrace();
}
System.out.println("Sleeping before reconnect...");
try { Thread.sleep(15000); } catch (Exception e) { }
}
}
}
I tried to simulate the code and found that the error was very simple. You should use https instead of http in the url :)
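Applied to the main method above, the constructor call becomes:
TwitterConsumer t = new TwitterConsumer(
        "XXX", "XXX", "XXX", "XXX",
        "https://stream.twitter.com/1/statuses/sample.json", "sample");
t.start();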
