I'm trying to do image stitching on Android. I used the SURF algorithm as the feature finder and got an error like "try to enable OPENCV_ENABLE_NONFREE". I use native C++ code in Android Studio and do the stitching in the background. I solved the SURF error, but now, when I test the application on a real device, I get this error:
I did some research on another forum, and they say the problem may be caused by a function that doesn't return the value it's supposed to return. Can anyone help me, please? This is the code:
private Handler handler = new Handler(new Handler.Callback() {
@Override
public boolean handleMessage(Message message) {
switch (message.what) {
case HANDLER_START_STITCHING :
{
new Thread()
{
public void run()
{
AsyncTask.execute(new Runnable() {
@Override
public void run() {
String[] source=getDirectoryFilelist("null");
final File resultDir = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + File.separator + "viewwer");
// if (!resultDir.exists())
// resultDir.mkdir();
final String stitchingResultImagePath = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath()) +"/result.jpg"; // + (ANGLE-90) + ".jpg";
if( NativePanorama.jniStitching(source, stitchingResultImagePath, STITCH_IMAGE_SCALE) == 0 )
{
handler.sendMessage(handler.obtainMessage(HANDLER_SET_STITCHING_BUTTON_TEXT,"Stitching success"));
File image = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath());
File result_90 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result90.jpg");
File result_180 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result180.jpg");
File result_270 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result270.jpg");
File result_360 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result360.jpg");
Log.d("GESTION_STITCHING", result_180.toString());
/*if (ANGLE == 450) {
handler.sendMessage(handler.obtainMessage(HANDLER_FINISH_STITCHING,"Stitching success"));
}*/
if (image.exists()) {
File[] files = image.listFiles();
for (int i=0;i<files.length; i++) {
if (files[i].compareTo(result_90) != 0 && files[i].compareTo(result_180) != 0 && files[i].compareTo(result_270) != 0 && files[i].compareTo(result_360) != 0) {
files[i].delete();
}
}
}
}
else
{
handler.sendMessage(handler.obtainMessage(HANDLER_SET_STITCHING_BUTTON_TEXT,"Stitching error"));
}
}
});
}
}.start();
break;
}
}
return true;
}
});
And the following is the native C++ code:
JNIEXPORT jint JNICALL Java_com_priscilla_viewwer_utils_NativePanorama_jniStitching(JNIEnv *env, jclass clazz, jobjectArray source, jstring result, jdouble scale) {
clock_t beginTime, endTime;
double timeSpent;
beginTime = clock();
//init jni call java method
int i = 0;
bool try_use_gpu = true;
vector<Mat> imgs;
Mat img;
Mat img_scaled;
Mat pano;
Mat pano_tocut;
Mat gray;
const char* result_name = env->GetStringUTFChars(result, JNI_FALSE); //convert result
LOGE("result_name=%s",result_name);
LOGE("scale=%f",scale);
int imgCount = env->GetArrayLength(source); //img count
LOGE("source imgCount=%d",imgCount);
for(i=0;i<imgCount;i++)
{
jstring jsource = (jstring)(env->GetObjectArrayElement(source, i));
const char* source_name = env->GetStringUTFChars(jsource, JNI_FALSE); //convert jsource
LOGE("Add index %d source_name=:%s", i, source_name);
img=imread(source_name);
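// Note: imread() returns an empty Mat when a file cannot be read, so img.empty()
// is worth checking here before resizing.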
Size dsize = Size((int)(img.cols*scale),(int)(img.rows*scale));
img_scaled = Mat(dsize,CV_32S);
resize(img,img_scaled,dsize);
imgs.push_back(img_scaled);
env->ReleaseStringUTFChars(jsource, source_name); //release convert jsource
}
img.release();
pano = stitchingImages(imgs);
for(i=0;i<imgs.size();i++)
{
imgs[i].release();
}
//cut black edges
//LOGE("stitching success,cutting black....");
pano_tocut = pano;
cvtColor(pano_tocut, gray, CV_BGR2GRAY);
Rect startROI(0,0,gray.cols,gray.rows); // start as the source image - ROI is the complete SRC-Image
cropLargestPossibleROI(gray,pano_tocut,startROI);
gray.release();
imwrite(result_name, pano_tocut);
pano.release();
pano_tocut.release();
env->ReleaseStringUTFChars(result, result_name); //release convert result
endTime = clock();
timeSpent = (double)(endTime - beginTime) / CLOCKS_PER_SEC;
LOGE("success,total cost time %f seconds",timeSpent);
// env->CallVoidMethod(clazz, javaMethodRef, timeSpent);
return 0;
}
I came across this issue too, and I found that it was a strict-aliasing violation, caused by a cast.
Problem:
uint32_t i32 = *((uint32_t*)m_data);
Solution:
uint32_t i32 = 0;
char* p = (char*)&i32;
for(int i =0;i < 4;i++)
{
p[i] = m_data[i];
}
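A memcpy does the same byte-wise copy more idiomatically and is equally safe with respect to aliasing; a minimal sketch, assuming m_data points to at least four readable bytes:
#include <cstring> // memcpy
uint32_t i32;
memcpy(&i32, m_data, sizeof(i32)); // well-defined; compilers optimize this to a single load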
These are the Java functions in question:
targetFunctionPtr1 is public static native long nInitSpxEncoder();
targetFunctionPtr is public static native long nEncodeBuffer(long j12, short[] sArr, byte[] bArr, long j13);
Sample pseudo-Java code:
long nInitSpxEncoder = LibSpeex.nInitSpxEncoder();
long nEncodeBuffer = LibSpeex.nEncodeBuffer(nInitSpxEncoder, sArr, bArr, 65536);
They are called below via JNI:
char b[16]={0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f,0x00};
jstring jinput = env->NewStringUTF(b);
jclass stringClass = env->FindClass("java/lang/String");
jmethodID getBytesMId = env->GetMethodID(stringClass, "getBytes", "()[B");
jbyteArray in1 = (jbyteArray) env->CallObjectMethod(jinput, getBytesMId);
jlong init;
init = targetFunctionPtr1(env, CallerObj1);
jshort outCArray[] = {100};
jshortArray retval = env->NewShortArray(10000); // allocate
env->SetShortArrayRegion(retval, 0 , 1, outCArray);
jlong nenc = targetFunctionPtr(env, CallerObj, init, retval, in1, 10);
I don't get why the above segfaults. It calls init, and that works, but the second call, nEncodeBuffer, segfaults. I can see in the debugger that it loads the byte array and the short array, but I cannot figure out why it does not return; it just loops until the segfault.
I mean, I call it via JNI, but I would expect targetFunctionPtr to return. It seems to loop somehow until the segmentation fault occurs.
This is a sample Java implementation that uses the JNI methods:
long nInitSpxEncoder = LibSpeex.nInitSpxEncoder();
if (nInitSpxEncoder != 0) {
int nGetEncoderFrameSize = (int) LibSpeex.nGetEncoderFrameSize(nInitSpxEncoder);
System.err.println(String.format("Frame size = %d", Integer.valueOf(nGetEncoderFrameSize)));
short[] sArr = new short[nGetEncoderFrameSize];
AudioRecord audioRecord2 = null;
byte[] bArr3 = bArr2;
long j12 = 0;
while (j12 < this.f13733d) {
try {
synchronized (this) {
if (audioRecord2 != this.f13731b) {
Log.i("AudioPttRecorder", "source changed");
}
audioRecord = this.f13731b;
}
if (audioRecord == null || audioRecord.read(sArr, 0, nGetEncoderFrameSize) != nGetEncoderFrameSize) {
break;
}
if (this.f13730a != null) {
this.f13730a.a(j12, a(nGetEncoderFrameSize, sArr));
}
long j13 = j12;
int i12 = nGetEncoderFrameSize;
short[] sArr2 = sArr;
long nEncodeBuffer = LibSpeex.nEncodeBuffer(nInitSpxEncoder, sArr, bArr, 65536);
if (nEncodeBuffer >= 0) {
Log.d("AudioPttRecorder", String.format("Write packet len=%d", Long.valueOf(nEncodeBuffer)));
bArr3[0] = (byte) (255 & nEncodeBuffer);
bArr3[1] = (byte) (nEncodeBuffer >> 8);
byte[] bArr4 = bArr3;
bufferedOutputStream.write(bArr4, 0, 2);
bufferedOutputStream.write(bArr, 0, (int) nEncodeBuffer);
bArr3 = bArr4;
nGetEncoderFrameSize = i12;
sArr = sArr2;
j12 = j13 + 1;
audioRecord2 = audioRecord;
} else {
throw new RuntimeException("Something wrong with encoding Speex");
}
} finally {
LibSpeex.nDestroySpxEncoder(nInitSpxEncoder);
Log.i("AudioPttRecorder", "exited");
}
}
bufferedOutputStream.flush();
Log.d("AudioPttRecorder", "finished");
return;
}
My code, using JNI directly, does something similar.
Any ideas what is wrong with the native code?
How should I call Libspx to encode a byte array in native code?
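For comparison, this is a minimal sketch of how I would expect the calls to go through the JNI interface itself rather than through the registered function pointers; the class path com/example/LibSpeex, the frame size, and the buffer size are placeholders, not the real values:
jclass speexClass = env->FindClass("com/example/LibSpeex"); // placeholder class path
jmethodID initMid = env->GetStaticMethodID(speexClass, "nInitSpxEncoder", "()J");
jmethodID encMid = env->GetStaticMethodID(speexClass, "nEncodeBuffer", "(J[S[BJ)J");
jlong state = env->CallStaticLongMethod(speexClass, initMid);
jshortArray pcm = env->NewShortArray(320); // placeholder frame size; query nGetEncoderFrameSize
jbyteArray out = env->NewByteArray(65536);
jlong written = env->CallStaticLongMethod(speexClass, encMid, state, pcm, out, (jlong)65536);
Going through CallStaticLongMethod lets the VM marshal every argument against the declared signature, whereas a raw function-pointer call with a mismatched argument (for example, an int literal where a jlong is expected) can corrupt the stack and fail exactly as described above.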
I am trying to record my vocals and then merge them with an audio file using the OpenSL ES library. I found a GitHub sample called Native-Audio, and it merges the two audio tracks, but the background audio file plays much faster than its actual rate in the final output.
Please use headphones to notice the difference.
Sample links: Before and After
Also, it uses files from the assets folder only. How can I manually select MP3 files from a file manager?
private void mixAudio(){
try {
if (!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) ||
!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED))
{
// Show rationale and request permission.
ActivityCompat.requestPermissions(this,
new String[]{android.Manifest.permission.READ_EXTERNAL_STORAGE, android.Manifest.permission.WRITE_EXTERNAL_STORAGE},
1000);
}
else {
buttonMix.setEnabled(false);
buttonMix.setText("MIXING....");
textViewMixPath.setText("");
buttonPlay.setEnabled(false);
buttonRecord.setEnabled(false);
buttonStart.setEnabled(false);
listView.setEnabled(false);
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try{
//final File file = new File(getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + "mix.wav");
//String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
//File file = new File(baseDir + "/mix.wav");
String path = Environment.getExternalStorageDirectory().getPath() + "/VocalRecorder";
File fileParent = new File(path);
if (!fileParent.exists()){
fileParent.mkdir();
}
final File file = new File(fileParent.getPath() + "/mix.wav");
//String author = getApplicationContext().getPackageName() + ".provider";
//Uri videoUri = FileProvider.get(this, author, mediaFile);
//final File file = new File(getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + "mix.wav");
//MediaMuxer muxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
String beat = beats[selectedBeat];
//beat = beat.replace(".wav", ".mp3");
AssetFileDescriptor afd = getAssets().openFd(beat);
MediaCodec codec = null;
//ByteBuffer outputBuffer;
//short[] data; // data for the AudioTrack playback
//int outputBufferIndex = -1;
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
final long duration = Long.parseLong(durationStr);
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
// right now I am pointing to a URI but I have tested that both will
// play the media file using MediaPlayer
int sampleRate = 0;
int numChannels = 0;
int dstIndex = -1;
int numTracks = extractor.getTrackCount(); //This says 1
for (int i = 0; i < numTracks; ++i) { // so this will just run once
MediaFormat format = extractor.getTrackFormat(i); // getting info so it looks good so far
String mime = format.getString(MediaFormat.KEY_MIME); // "audio/mpeg"
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
codec = MediaCodec.createDecoderByType(mime);
codec.configure(format, null, null, 0);
//format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AMR_NB);
//dstIndex = muxer.addTrack(format);
//writer.setFrameRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
//writer.setSamplesPerFrame(format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
//writer.setBitsPerSample(16);
sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
break;
}
}
// Calculate the number of frames required for the specified duration (METADATA_KEY_DURATION is in milliseconds)
long numFrames = (long)(duration * sampleRate/1000);
// Create a wav file with the name specified as the first argument
WavFile wavFile = WavFile.newWavFile(file, numChannels, numFrames, 16, sampleRate);
if (codec == null) {
throw new IllegalArgumentException("No decoder for file format");
}
//ByteBuffer[] inputBuffers = decoder.getInputBuffers();
//ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
/*
Boolean eosReceived = false;
while (!eosReceived) {
int inIndex = decoder.dequeueInputBuffer(1000);
if (inIndex >= 0) {
ByteBuffer buffer = decoder.getInputBuffer(inIndex);
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// We shouldn't stop the playback at this point, just pass the EOS
// flag to mDecoder, we will get it again from the
// dequeueOutputBuffer
Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
int outIndex = decoder.dequeueOutputBuffer(info, 1000);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
//outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = decoder.getOutputFormat();
Log.d("DecodeActivity", "New format " + format);
//audioTrack.setPlaybackRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer outBuffer = decoder.getOutputBuffer(outIndex);
Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outBuffer);
final byte[] chunk = new byte[info.size];
outBuffer.get(chunk); // Read the buffer all at once
outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
//audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
decoder.releaseOutputBuffer(outIndex, false);
break;
}
// All decoded frames have been rendered, we can stop playing now
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}
}
}
*/
short recordedData[] = recordedData();
int recordMixStartIndex = -1;
//muxer.start();
codec.start();
Boolean sawInputEOS = false;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo infoMux = new MediaCodec.BufferInfo();
int count = 0;
while (!sawInputEOS) {
int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_US);
Log.i(LOG_TAG, "inputBufIndex : " + inputBufIndex);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codec.getInputBuffer(inputBufIndex);
int sampleSize = extractor.readSampleData(dstBuf, 0);
Log.i(LOG_TAG, "sampleSize : " + sampleSize);
long presentationTimeUs = 0;
if (sampleSize < 0) {
Log.i(LOG_TAG, "Saw input end of stream!");
sawInputEOS = true;
sampleSize = 0;
} else {
presentationTimeUs = extractor.getSampleTime();
Log.i(LOG_TAG, "presentationTimeUs " + presentationTimeUs);
}
codec.queueInputBuffer(inputBufIndex,
0, //offset
sampleSize,
presentationTimeUs,
sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
if (!sawInputEOS) {
Log.i(LOG_TAG, "extractor.advance()");
extractor.advance();
}
}
final int res = codec.dequeueOutputBuffer(info, TIMEOUT_US);
if (res >= 0) {
int outputBufIndex = res;
ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
//final byte[] chunk = new byte[info.size];
//buf.get(chunk); // Read the buffer all at once
short[] shortArray = new short[info.size/2];
buf.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shortArray);
buf.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
if (shortArray.length > 0) {
//mAudioTrack.write(chunk, 0, chunk.length);
//infoMux.presentationTimeUs = info.presentationTimeUs;
//infoMux.flags = info.flags;
//muxer.writeSampleData(dstIndex, ByteBuffer.wrap(chunk),
// infoMux);
long []longData = new long[shortArray.length];
// Merge data with vocal
// Calculate the time
final long bufferTimer = info.presentationTimeUs/1000;
int vocalCount = 0;
for (int i = 0; i < shortArray.length; i ++) {
//writer.writeShortLittle(shortArray[i]);
long offsetTime = i*1000/(sampleRate*2); // 2 channels
Boolean mixed = false;
if ((offsetTime + bufferTimer > recordStartTime) && (offsetTime + bufferTimer <= recordStopTime + 500)){
if (recordMixStartIndex == -1){
recordMixStartIndex = 0;
}
if (recordMixStartIndex < recordedData.length){
//Log.i("TAG", "############ mix record data: " + recordMixStartIndex);
longData[i] = TPMixSamples((int)(recordedData[recordMixStartIndex]), (int)shortArray[i]/3);
if (vocalCount >= 3) {
recordMixStartIndex++;
vocalCount = 0;
}
else{
vocalCount ++;
}
mixed = true;
}
}
else {
// All done, set sawInputEOS to stop mixing
if (bufferTimer > recordStopTime + 500){
sawInputEOS = true;
}
}
if (!mixed) {
longData[i] = shortArray[i];
}
}
Log.i("TAG", "############ write frames: " + longData.length/2);
wavFile.writeFrames(longData, longData.length/2);
count ++;
if (count % 5 == 0){
runOnUiThread(new Runnable() {
@Override
public void run() {
long percent = bufferTimer*100/duration;
buttonMix.setText("MIXING..." + percent + "%");
}
});
}
}
codec.releaseOutputBuffer(outputBufIndex, false /* render */);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawInputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
//codecOutputBuffers = codec.getOutputBuffers();
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
final MediaFormat oformat = codec.getOutputFormat();
Log.d(LOG_TAG, "Output format has changed to " + oformat);
//mAudioTrack.setPlaybackRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
}
}
// Close the wavFile
wavFile.close();
// muxer.stop();
// muxer.release();
codec.stop();
codec.release();
extractor.release();
runOnUiThread(new Runnable() {
@Override
public void run() {
buttonMix.setText("MIX DONE");
buttonPlay.setEnabled(true);
buttonRecord.setEnabled(true);
textViewMixPath.setText(file.getPath());
buttonStart.setEnabled(true);
listView.setEnabled(true);
}
});
}
catch (Exception e){
e.printStackTrace(); // don't swallow decode/mux errors silently
}
}
});
thread.start();
}
}
catch (Exception e){
e.printStackTrace();
}
}
private final int INT16_MIN = - 32768;
private final int INT16_MAX = 32767;
private long TPMixSamples(int a, int b) {
if (a > INT16_MAX) {a = INT16_MAX;}
if (a < INT16_MIN) {a = INT16_MIN;}
return
// If both samples are negative, mixed signal must have an amplitude between the lesser of A and B, and the minimum permissible negative amplitude
a < 0 && b < 0 ?
((int)a + (int)b) - (((int)a * (int)b)/INT16_MIN) :
// If both samples are positive, mixed signal must have an amplitude between the greater of A and B, and the maximum permissible positive amplitude
( a > 0 && b > 0 ?
((int)a + (int)b) - (((int)a * (int)b)/INT16_MAX)
// If samples are on opposite sides of the 0-crossing, mixed signal should reflect that samples cancel each other out somewhat
:
a + b);
}
/** Native methods, implemented in jni folder */
public static native void createEngine();
public static native void createBufferQueueAudioPlayer(int sampleRate, int samplesPerBuf);
/////
public static native boolean createAssetAudioPlayer(AssetManager assetManager, String filename);
// true == PLAYING, false == PAUSED
public static native void setPlayingAssetAudioPlayer(boolean isPlaying);
public static native int getDurationAssetAudioPlayer();
public static native int getCurrentPositionAssetAudioPlayer();
//////
public static native boolean createUriAudioPlayer(String uri);
public static native void setPlayingUriAudioPlayer(boolean isPlaying);
public static native void setLoopingUriAudioPlayer(boolean isLooping);
public static native void setChannelMuteUriAudioPlayer(int chan, boolean mute);
public static native void setChannelSoloUriAudioPlayer(int chan, boolean solo);
public static native int getNumChannelsUriAudioPlayer();
public static native void setVolumeUriAudioPlayer(int millibel);
public static native void setMuteUriAudioPlayer(boolean mute);
public static native void enableStereoPositionUriAudioPlayer(boolean enable);
public static native void setStereoPositionUriAudioPlayer(int permille);
public static native boolean selectClip(int which, int count);
public static native void stopClip();
public static native boolean enableReverb(boolean enabled);
public static native boolean createAudioRecorder();
public static native void startRecording();
public static native void stopRecording();
public static native void pauseRecording();
public static native void resumeRecording();
public static native short[] recordedData();
public static native double recordedDuration();
public static native void shutdown();
/** Load jni .so on initialization */
static {
System.loadLibrary("native-audio-jni");
}
I'm working on a simple (I thought) OpenVR driver that sends the compositor's output to my Android phone to display with Google Cardboard. I've got the basics working, but when my driver receives the rotation data from my Android device it's very noisy; with my phone sitting still on my desk it jitters a lot.
The floats are being sent over in network byte order.
Here is the code receiving the rotation data:
float BytesToFloat(unsigned char* buffer, int start)
{
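// Bug, as the fix noted at the end of this question explains: this ORs the bytes
// into an integer and converts its numeric value to float instead of reinterpreting
// the bits, and it ignores that the bytes arrive in network byte order.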
float f = (buffer[start] << 24) | (buffer[start + 1] << 16) | (buffer[start + 2] << 8) | buffer[start + 3];
return f;
}
void MobileDeviceReceiver::PoseThread(std::function<void(MobileVector*)> PoseUpdateCallback)
{
sockaddr_in client;
int inLen,clientStructLen = sizeof(client);
MobileVector currentPose;
DriverLog("started pose tracking thread %d\n",m_isActive);
bool errorLastCall = false;
char pchBuffer[65535];
while (m_isActive)
{
if ((inLen = recvfrom(m_socket, pchBuffer, 65535, 0, (sockaddr*)&client, &clientStructLen)) == SOCKET_ERROR)
{
if (errorLastCall == false)
{
DriverLog("Error receiving data: %d\n", WSAGetLastError());
}
errorLastCall = true;
}
else {
errorLastCall = false;
}
currentPose.x = floorf(BytesToFloat((unsigned char*)pchBuffer, 0)*10000)/10000;
currentPose.y = 0;//BytesToFloat((unsigned char*)pchBuffer, 4);
currentPose.z = 0;//BytesToFloat((unsigned char*)pchBuffer, 8);
PoseUpdateCallback(¤tPose);
}
}
And the code sending it from the phone:
class Task implements Runnable
{
public Queue<DatagramPacket> packets = new LinkedList<DatagramPacket>();
private boolean isActive = true;
@Override
public void run() {
try{
DatagramSocket socket = new DatagramSocket();
while(isActive)
{
if(packets.size() > 0)
{
socket.send(packets.remove());
}
}
}catch(Exception e)
{
e.printStackTrace();
}
}
public void Kill()
{
isActive = false;
}
}
ByteBuffer buffer = ByteBuffer.allocate(12);
protected float ALPHA = 0.05f;
private float[] lowPassFilter(float[] input,float[] output)
{
if(output == null) return input;
for ( int i=0; i<input.length; i++ ) {
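// Exponential smoothing: move each output component a fraction ALPHA toward the new input.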
output[i] = output[i] + ALPHA * (input[i] - output[i]);
}
return output;
}
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
if(sensorEvent.sensor == sRotation)
{
float x,y,z;
x = (float) Math.toRadians(sensorEvent.values[0]);
y = (float) Math.toRadians(sensorEvent.values[1]);
z = (float) Math.toRadians(sensorEvent.values[2]);
float[] values = lowPassFilter(new float[]{x,y,z},new float[3]);
buffer.order(ByteOrder.BIG_ENDIAN);
buffer.putFloat(values[0]);
buffer.putFloat(values[1]);
buffer.putFloat(values[2]);
try {
DatagramPacket packet = new DatagramPacket(buffer.array(), 12, InetAddress.getByName(IP), 8888);
if(task != null) {
task.packets.add(packet);
}
}catch(Exception e)
{
e.printStackTrace();
}
buffer.clear();
}
}
Thanks in advance
For future reference, I was doing two things wrong:
I was sending the float in network byte order and not rearranging it to native byte order on the local machine.
Bit shifts don't work for reassembling floats, so I used memcpy instead.
After changing these two things it all worked perfectly.
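For reference, this is a minimal sketch of the corrected receiving function with those two fixes applied; ntohl undoes the network byte order and memcpy copies the bit pattern without the shift or aliasing problems (on Windows, ntohl comes from winsock2.h, which this code already uses):
float BytesToFloat(unsigned char* buffer, int start)
{
    uint32_t bits;
    memcpy(&bits, buffer + start, sizeof(bits)); // raw bytes as a 32-bit integer
    bits = ntohl(bits);                          // network (big-endian) -> host byte order
    float f;
    memcpy(&f, &bits, sizeof(f));                // reinterpret the bit pattern as a float
    return f;
}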
I have a big problem with implementing the svm_predict function. I have a trained SVM and a prepared test dataset; both files are .txt files. The test data come from LBP (Local Binary Patterns) and look like this:
-0.6448744548418511
-0.7862774302452588
1.7746263060948377
I'm loading it into the svm_predict function, and after compiling and running my program the console shows:
Accuracy = 0.0% (0/800) (classification)
So it looks like it can't read the test data?
import libsvm.*;
import java.io.*;
import java.util.*;
class svm_predict {
private static double atof(String s)
{
return Double.valueOf(s).doubleValue();
}
private static int atoi(String s)
{
return Integer.parseInt(s);
}
private static void predict(BufferedReader input, DataOutputStream output, svm_model model, int predict_probability) throws IOException
{
int correct = 0;
int total = 0;
double error = 0;
double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
int svm_type=svm.svm_get_svm_type(model);
int nr_class=svm.svm_get_nr_class(model);
double[] prob_estimates=null;
if(predict_probability == 1)
{
if(svm_type == svm_parameter.EPSILON_SVR ||
svm_type == svm_parameter.NU_SVR)
{
System.out.print("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+svm.svm_get_svr_probability(model)+"\n");
}
else
{
int[] labels=new int[nr_class];
svm.svm_get_labels(model,labels);
prob_estimates = new double[nr_class];
output.writeBytes("labels");
for(int j=0;j<nr_class;j++)
output.writeBytes(" "+labels[j]);
output.writeBytes("\n");
}
}
while(true)
{
String line = input.readLine();
if(line == null) break;
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
double target = atof(st.nextToken());
int m = st.countTokens()/2;
svm_node[] x = new svm_node[m];
for(int j=0;j<m;j++)
{
x[j] = new svm_node();
x[j].index = atoi(st.nextToken());
x[j].value = atof(st.nextToken());
}
double v;
if (predict_probability==1 && (svm_type==svm_parameter.C_SVC || svm_type==svm_parameter.NU_SVC))
{
v = svm.svm_predict_probability(model,x,prob_estimates);
output.writeBytes(v+" ");
for(int j=0;j<nr_class;j++)
output.writeBytes(prob_estimates[j]+" ");
output.writeBytes("\n");
}
else
{
v = svm.svm_predict(model,x);
output.writeBytes(v+"\n");
}
if(v == target)
++correct;
error += (v-target)*(v-target);
sumv += v;
sumy += target;
sumvv += v*v;
sumyy += target*target;
sumvy += v*target;
++total;
}
if(svm_type == svm_parameter.EPSILON_SVR ||
svm_type == svm_parameter.NU_SVR)
{
System.out.print("Mean squared error = "+error/total+" (regression)\n");
System.out.print("Squared correlation coefficient = "+
((total*sumvy-sumv*sumy)*(total*sumvy-sumv*sumy))/
((total*sumvv-sumv*sumv)*(total*sumyy-sumy*sumy))+
" (regression)\n");
}
else
System.out.print("Accuracy = "+(double)correct/total*100+
"% ("+correct+"/"+total+") (classification)\n");
}
private static void exit_with_help()
{
System.err.print("usage: svm_predict [options] test_file model_file output_file\n"
+"options:\n"
+"-b probability_estimates: whether to predict probability estimates, 0 or 1 (default 0); one-class SVM not supported yet\n");
System.exit(1);
}
public static void main(String argv[]) throws IOException
{
int i, predict_probability=0;
// parse options
for(i=0;i<argv.length;i++)
{
if(argv[i].charAt(0) != '-') break;
++i;
switch(argv[i-1].charAt(1))
{
case 'b':
predict_probability = atoi(argv[i]);
break;
default:
System.err.print("Unknown option: " + argv[i-1] + "\n");
exit_with_help();
}
}
if(i>=argv.length-2)
exit_with_help();
try
{
BufferedReader input = new BufferedReader(new FileReader(argv[i]));
DataOutputStream output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(argv[i+2])));
svm_model model = svm.svm_load_model(argv[i+1]);
if(predict_probability == 1)
{
if(svm.svm_check_probability_model(model)==0)
{
System.err.print("Model does not support probabiliy estimates\n");
System.exit(1);
}
}
else
{
if(svm.svm_check_probability_model(model)!=0)
{
System.out.print("Model supports probability estimates, but disabled in prediction.\n");
}
}
predict(input,output,model,predict_probability);
input.close();
output.close();
}
catch(FileNotFoundException e)
{
exit_with_help();
}
catch(ArrayIndexOutOfBoundsException e)
{
exit_with_help();
}
}
}
It's difficult to know because it's a big process.
Make sure you follow their classification guide.
The data should be scaled; it seems to go above 1 right now.
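In particular, svm_predict expects every line of the test file in LIBSVM's sparse format: a label followed by index:value pairs. The data shown in the question has bare values with no label and no indices, so the tokenizer reads the first value as the label and builds an empty feature vector, which is consistent with the 0% accuracy. A hypothetical line for the three values above (the label 1 and the feature indices are made up) would look like:
1 1:-0.6448744548418511 2:-0.7862774302452588 3:1.7746263060948377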
I need to add an animated GIF, in the form of a spinner, to some SWT widget, for example a Label. By default this Label will be disabled (labelSpinner.setEnabled(false)). When I start some long-running operation, the Label with the spinner will be shown (labelSpinner.setEnabled(true)) in the right corner of the window.
Normally an Image can be added to a Label with labelSpinner.setImage(arg0).
If I add this SPINNER.GIF to the Label the normal way, it won't animate; it's only a static image.
Does anybody know how to add an animated GIF (for example a spinner) to a widget in an SWT-based Java SE app? I browsed many examples, but many of them were useless, and the good ones were too complicated.
I would like to do it very simply.
Can somebody help?
Did you try the Eclipse article about SWT Images?
This part will load the image and display it in a Canvas:
ImageLoader loader = new ImageLoader();
loader.load(getClass().getResourceAsStream("Idea_SWT_Animation.gif"));
Canvas canvas = new Canvas(shell,SWT.NONE);
image = new Image(display,loader.data[0]);
int imageNumber;
final GC gc = new GC(image);
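// Frames will be composed onto this base image through the GC; the paint
// listener below just blits the image to the canvas.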
canvas.addPaintListener(new PaintListener(){
public void paintControl(PaintEvent event){
event.gc.drawImage(image,0,0);
}
});
And this part updates the GIF:
Thread thread = new Thread(){
public void run(){
long currentTime = System.currentTimeMillis();
int delayTime = loader.data[imageNumber].delayTime;
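// GIF frame delays are stored in hundredths of a second, so multiply by 10 for milliseconds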
while(currentTime + delayTime * 10 > System.currentTimeMillis()){
// Wait till the delay time has passed
}
display.asyncExec(new Runnable(){
public void run(){
// Increase the variable holding the frame number
imageNumber = imageNumber == loader.data.length-1 ? 0 : imageNumber+1;
// Draw the new data onto the image
ImageData nextFrameData = loader.data[imageNumber];
Image frameImage = new Image(display,nextFrameData);
gc.drawImage(frameImage,nextFrameData.x,nextFrameData.y);
frameImage.dispose();
canvas.redraw();
}
});
}
};
shell.open();
thread.start();
After trying at least 3 different animated GIF examples, none of which worked, I started working on my own, based mainly on the answer above.
Here is a complete running example including:
a base64 decoder (courtesy of http://www.source-code.biz/base64coder/java/ )
spinner GIF
main method
Remove the main method, base64 methods, and image data, and you will have a working animated GIF canvas.
import java.io.IOException;
import java.io.InputStream;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.ImageLoader;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
public class AnimatedGif extends Canvas {
public static void main(String[] args) throws IOException {
byte[] d = decode(b64.toCharArray(), 0, b64.length());
Display display = new Display ();
Shell shell = new Shell(display);
shell.setBounds(0, 0, 100, 100);
AnimatedGif gif = new AnimatedGif(shell, SWT.NONE);
gif.setLocation(10,10);
gif.setSize(16, 16);
gif.load(new java.io.ByteArrayInputStream(d));
shell.open();
gif.animate();
while (!shell.isDisposed ()) {
if (!display.readAndDispatch ()) display.sleep ();
}
display.dispose ();
}
private final ImageLoader loader = new ImageLoader();
private int img = 0;
private volatile boolean animating = false;
private Thread animateThread;
public AnimatedGif(Composite parent, int style) {
super(parent, style);
}
public void load (InputStream resource) throws IOException {
loader.load(resource);
}
public void animate() {
if (animateThread == null) {
animateThread = createThread();
animateThread.setDaemon(true);
}
if (animateThread.isAlive())
return;
animateThread.start();
}
public void stop() {
animating = false;
if (animateThread != null)
try {
animateThread.join();
animateThread = null;
} catch (InterruptedException e) {
// do nothing
}
}
private Thread createThread() {
return new Thread() {
long currentTime = System.currentTimeMillis();
final Display display = getParent().getDisplay();
public void run() {
animating = true;
while(animating) {
img = (img == loader.data.length-1) ? 0 : img + 1;
int delayTime = Math.max(50, 10*loader.data[img].delayTime);
long now = System.currentTimeMillis();
long ms = Math.max(currentTime + delayTime - now, 5);
currentTime += delayTime;
try {
Thread.sleep(ms);
} catch(Exception e) {
return;
}
if (!display.isDisposed())
display.asyncExec(new Runnable() {
@Override
public void run() {
ImageData nextFrameData = loader.data[img];
Image frameImage = new Image(display, nextFrameData);
GC gc = new GC(AnimatedGif.this);
gc.drawImage(frameImage, nextFrameData.x, nextFrameData.y);
gc.dispose(); // GCs hold OS resources and must be disposed
frameImage.dispose();
//canvas.redraw();
}
});
}
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
GC gc = new GC(AnimatedGif.this);
gc.fillRectangle(0, 0, getBounds().width, getBounds().height); // clear the canvas when the animation stops
gc.dispose();
}
});
}
};
}
private static final char[] map1 = new char[64];
static {
int i = 0;
for (char c = 'A'; c <= 'Z'; c++)
map1[i++] = c;
for (char c = 'a'; c <= 'z'; c++)
map1[i++] = c;
for (char c = '0'; c <= '9'; c++)
map1[i++] = c;
map1[i++] = '+';
map1[i++] = '/';
}
private static final byte[] map2 = new byte[128];
static {
for (int i = 0; i < map2.length; i++)
map2[i] = -1;
for (int i = 0; i < 64; i++)
map2[map1[i]] = (byte) i;
}
public static byte[] decode(char[] in, int iOff, int iLen) {
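// Standard Base64 decoding: each group of 4 input characters yields 3 output bytes;
// trailing '=' padding characters are stripped from iLen first.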
if (iLen % 4 != 0)
throw new IllegalArgumentException(
"Length of Base64 encoded input string is not a multiple of 4.");
while (iLen > 0 && in[iOff + iLen - 1] == '=')
iLen--;
int oLen = (iLen * 3) / 4;
byte[] out = new byte[oLen];
int ip = iOff;
int iEnd = iOff + iLen;
int op = 0;
while (ip < iEnd) {
int i0 = in[ip++];
int i1 = in[ip++];
int i2 = ip < iEnd ? in[ip++] : 'A';
int i3 = ip < iEnd ? in[ip++] : 'A';
if (i0 > 127 || i1 > 127 || i2 > 127 || i3 > 127)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int b0 = map2[i0];
int b1 = map2[i1];
int b2 = map2[i2];
int b3 = map2[i3];
if (b0 < 0 || b1 < 0 || b2 < 0 || b3 < 0)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int o0 = (b0 << 2) | (b1 >>> 4);
int o1 = ((b1 & 0xf) << 4) | (b2 >>> 2);
int o2 = ((b2 & 3) << 6) | b3;
out[op++] = (byte) o0;
if (op < oLen)
out[op++] = (byte) o1;
if (op < oLen)
out[op++] = (byte) o2;
}
return out;
}
static String b64 =
"R0lGODlhEAAQAPf/AMra5KO9zOzz9s3b5MDS3aS+zZOwwsrZ4+bt89jm7IKftuLr" +
"8ZCswH6bs9vn7qrD0+Ts8oqnvYKetomlu8PV4t7p79Pg6dzp79vm7q/F1aC6zdTi" +
"6bbL2XqYsdDf55SwxK/H1ajC0Yajudzp7q7E087c59Xi6niWr8XY4qW/0LnM2aO9" +
"zoyov7bK2djk7ICbtIOhtt7p7pq1yc3b5sDU38DR39vm7LDH1afA0Yqovebu85Gt" +
"wouovsvZ5oCdtunv9IaiuXyZscrb5qnC0Zezxdjj64WiuZ65y7vQ3N/p8OLq8avE" +
"1MTX4o6rwJaxxLnN29jm7a7F1dvm7Y2qwH2bs7zQ3Jizx9Df6djj7HmXr+Tt8rvO" +
"26/E1LrO3KC5zKW+z8/e577Q3oCctYShubTK2HyasZGtwHuYsdTg6enw89/p76zE" +
"04iku+Lr8LXJ2JKvw+Xt8tHf583b57vP3pu2ycjY45m0yNvp7qzE1bHI13qZsaK8" +
"zn2aspq0yODp8Nfi6svb5oaiuuDp76vE09Hf6ZWzxMfY4qnC087e5+rx9eLs8pSv" +
"xL/R3rPI2Nfj64ajuq7E1Y2pv7rQ2+Lp8J65yarC1JKvxLzR3nybsrzQ3unw9HmY" +
"sLvR3LPI17nO28rY5Iqlvc3c5urx9Iakus/f53iVrtPi6cnY436btNTi6tfi67PK" +
"1rHI2H6bspy3yJ63ydXi7JezxKjA0ZGuwNjk64CbtazE0Zezx8DR3NXi68TX4Nvn" +
"7e7097vP24CctH2atIqlvN7o7tPh6Nzn7JGvwK/F1MbX4dHg6MfY4b7S3c/e5n2b" +
"srjN2ezy9eDq78rb4svb473R2+Ts8cHV3r7R3eXu8rfN2JSvwo+tv9/p7pSxw6C7" +
"ytbj6cTX36zD1NHg5vL3+KnB0Onx9cPV35y4yLPJ1dnl7LnO2cHU3avD0bnP2sDV" +
"4pKvwdfk7O7z99Xi6XqXsNTi6N3o7sjY4ubv8s7c5qvE0env842svuLs8Z64yJm0" +
"xZq2x46svuLq8P///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQFAAD/ACwAAAAAEAAQ" +
"AAAIvQD/CRR4poEEf0q0DFz478SZFwqAxPC3AAEChqU6GAQSwcHEihcFntAoQUQE" +
"BgkcVKCI4IdAkiYZfCiScmXFfzBzMFhYk6K/jScZ/uspSEFMJ0KxpIxxAQgPBk5k" +
"CDVRBIoDDE8/yNAg1MIGmglkyvDy5cHCEh4smIC14d/WLyGGQGpBA4UQtBYsCDyy" +
"AseDByBaqKhhqAfaKwNTPIiyBAQZFUjqGmY4KEMGEG5UdAlDgYlQgQ9IdOJQhRPD" +
"gAAh+QQFAAD/ACwAAAAAEAAQAAAI1gD/CRR4IkiZBjx0DFwoMIueBj58EBGlRAtD" +
"h1TEKAikgJUOfxAWbsKk0QgbFjmEJEkiEIFAVBLGjMoxZUcfG1AEhvz3YiOoSAz/" +
"3VHzD8EPIxNYMPgQVMqFSRAWsGjyhoidoAmkVFjgb8eHPq80BMWS4I4fQZboaFiB" +
"I6gJFxhGXDiyJ4WsIZUWItrwxwYGF/9ShHjwIEOLGkwAkbJgAgvgwHjwLLnB4QmF" +
"HnI8oNmwYSCJDCDycPBUA8UMMHGuMHxAQvSTTBTqzBASVCCIFlWQ1KDAMCAAIfkE" +
"BQAA/wAsAAAAABAAEAAACMsA/wkUeKZDmSANfAxc+K8BlRe+FNRSMIbKMoYwFCjI" +
"CAOGEY1iqgyMMIHkhJIRgE14JImXEoEMdpiJyYBBpBxN5mjyB+GfEytWiNyyspCR" +
"ohE8f2jQ4GUpw38JLvBE8CVFChxDnkadOmTQoRCQtEqFoKNYlAdrMohNyiVDlBst" +
"5ix0lAAphAX/uNwgoyIMDQpMdJWAgrSNQC6NODyZQ4NJnVAWCFeYNLDFli6MDM3w" +
"gCZXgl0XGFaRVAPFZjQmHEl5KhCaaQ+EKDAMCAAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"2wD/CRRIJUiHLCdODFz47xEMCT5eNOBjMAjDKTkisBFh5J+CiK3KDFy06I2ZJix4" +
"TJggYowCPr8EHqEjg4iTDzuaTGExQYEVQP9WCN3j5cjCKXYuIfihZEiIEDhwMBSo" +
"KMYCLSSiHAqxZKoNB0kW/MvAZQmJDFP/jfi3AMEaEDcacZgqZSACEjfIcOjCZKEL" +
"DGr+IdDxz42KJ0hQCEFkQVUCB34WEP6nYssWGqfkxDH1B8MIfwv8DUQShsKnEoQ2" +
"FMEQI4lohhR6gLFg4q+DCmn/zQBzZUMqGwwDAgAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"uAD/CRTIgAcQBQ06DFz474iMDwVFSEh4gmGKFRoeMogAZGKHUgMHPcDxxYsMJxsl" +
"UhSYIcqDEF8yosyh8sy/BxmWPBjyYGFKCS/OkABBFBLDfxsPNujkhkyLFkchcpTA" +
"QYVVGkeLJHAQw1+VLkhqoDi6tasSTmFoGBJSYuHWCv4WaPlHAUWPEh4smMDyNi4C" +
"gUzu5t2g1QHcBQj+CsSrtwgUw34VD7xiAVZZvz+OCtyQAMMFQf4YBgQAIfkEBQAA" +
"/wAsAAAAABAAEAAACNYA/wkUeMTSDhZGXgxc+C9Fij10PjSZoADVJoYk8IRIoaHP" +
"GxaBJGDKMvBBBjwPZK14RYQBqDFi9AwkAWLJgyE4NNj5EGmUAiokQeTJcyNDJYY5" +
"jMQ80YKD0xYM/01ho6BBkCpPPD2pEfVgIB9lkGSqQYFJ1D45FPhoQBZFD0BRbQhh" +
"RYQHhToz5JBCxBBKEh2idPybAcaDhQ0msCSQcudfEn9KBAqJg8bEHxeLL6gRCEHL" +
"wCsbsNjAcKfCpH8Qov7b4ALDCD8LICD4N1u1iwuC/C34wTAgACH5BAUAAP8ALAAA" +
"AAAQABAAAAjNAP8JFMil2JAvGpwMXPiPC5cMUQal0GCFQQSGLRrdiPLgUAovVnZM" +
"gDGwyhYOZG6sCYFDAxEzERQ0ECipyxMVLTJAGqLhFoMJCqj8o1CD0ZwwcxaGHPni" +
"DAoUhmjQYPjvpwJfHWZoZUKBKkwYCsp4GFunK8NIEWDUCkIIDZpQuqjmAGZEJgUT" +
"uSyUcLSQUROgY8b8c5QACpQEiBErmvMoqEApu0ZIvkD5wghNksQsG3ihgr/PoD/z" +
"qsJwUhsIqFNDUEJV4IIfOhAg+MEwIAAh+QQFAAD/ACwAAAAAEAAQAAAI1wD/CRRI" +
"Yk0GEkNWDFz4T4WbGyC4RAmx4sgihki2qCBzY8khinQWTRlIIcyWJxwakQiBY4+M" +
"NzkeCaRAgQaSLhwyLMHhhYiZCDD+zejx6RQKJguPOGnCRgIVMGBKyBHC8N8HFiJ8" +
"BLligVAcRFV38DDyosMGExtMWajaZMK/BllSuSjyR1XVKRMU8DlhA4PfBC4WTsGq" +
"9cQ/BzFGOMAg5Z+Nf3YmjCkrsEISf37U/FOs6JICBa2CDPS3YMHAJDEQWOFThqE/" +
"HQj+lV7wA9CvqgJhx9aihGFAACH5BAUAAP8ALAAAAAAQABAAAAjAAP8JFMipCodO" +
"JB4MXPiPCYUwXVS4AZEhwyCGJXqgoIFEBRkQS6I8SDHwioeMhmqoaAHiwQMcK44I" +
"tGDhpJCNLSANCfFFg4x/G2CZqFli4YMvXmR8YJAgQZENFhj+87mUBwYHUIqYkCrD" +
"CQMeQC7EcJAAi1SvEUQoEOSvAlmpDCIAkdDA34K2bwcyyPGPbod/CO66dbo0Ld2B" +
"PwLjTRBXhN8TAxEgEDhWrt9SDCXfjQFEwYszkKX+06LEH90zDAMCACH5BAkAAP8A" +
"LAAAAAAQABAAAAjWAP8JFEihBpIqLUAMXPhPyIw6FDI94ZCHxAOGV+KAmYGihieK" +
"IDKQGLhhAxoPcnpQmHhjCR48KQS6wGLCAilATGq0yPDgQYiYLjDY+LMB0cJKQ2Sl" +
"2HPkwggMLkww/IdjhQY6lgT5uZMAy1QNr/p82OFvQQUpCabaIfKmCYsFECZdkDL1" +
"AwMWE4z8QPBPzZ2pkUAFUvBCIASBUGz02TElx6gxElAJ5PsvSRIhOViwMaJADKZN" +
"CyH408FKwWAxVPRkYahFiSgiPnw0UD11oA4eDcoEOcEwIAAh+QQJAAD/ACwAAAAA" +
"EAAQAAAIyAD/CRw4zAYtYdwGKvyXz1m2YAaF1XsGbWE1aW3UQKQVb8CBA8YG3qMH" +
"Z4E8c++OKQMwD9k4XALTpClp7tsxD8oGIOs2DaamRDPbOFMIwBi6ZARwuQMKp83C" +
"fwSiVUEqgKnThdTU9YrWq5nVp73OMWNmzWvQp6tWlUv7j6m4AQpt2Utny1a4tmnc" +
"AQhAaVuAAAUABxaoCdwzfq70UaJEzpUrSo8HQjNQqNC+WLEKZda8cNYsIoWuFTLA" +
"bhaxpwK1EWOADV+/hQEBACH5BAUAAP8ALAAAAAAQABAAAAjcAP8JFGgj1YYrYGYM" +
"XPivggMMLkxYANODAsN//pLEwFBkA6ESnyiEQTLQ3wJ/IzD8MRVHzikaW7aoEKhj" +
"gR8HCVRZQCQEBZInKtz804HAnxqIC5l04UDmBgkEEPxdkHKRQ6MbINYgODkiwcUM" +
"JJZwybA1iQMbF5eEOBSFhJYFMRR5ZYgDR4gQQ5T8QHDJzpSFR7zsWUH4HyArCiaw" +
"mNJkxwcnRGTQOSLwFx8FY0RMmMCDRRMzbxYtGlim1QsfCmAYEcEmQo6/C4N0CMKn" +
"wWkJMB5dFHjiRJbZVBgGBAAh+QQFAAD/ACwAAAAAEAAQAAAIuQD/CRToT9AFDAk2" +
"DFz47weCBf5iOEgAy8IVhgge+qvgAEoRExY8lBiYESLHBEU2hCzRg4lAjSexgBTZ" +
"AwWFf1pMTlxYQoghGmE4KYm4kyGKGki6VCGKkuE/GiqicpAAJAKDD05btCDjplMD" +
"BVUZOIUEoiyJMy8kiLC68MGQB0syPPh3poHaHAycyNDwJcSDKBkEnuhgd21eGV6+" +
"4HgwaGApwlStfti7IgXDwYV5XJVxxKlAwmA1MwwIACH5BAUAAP8ALAAAAAAQABAA" +
"AAjVAP8JFPhjgT9BF1wMXPgPQUMIC/yMwOBiA0OBEP5NqnAHgw0sG64M1JLxn5oL" +
"UhK4+GMCTRwhApX4S/LvTkosJjZY8ABmxj8donQkgcIQESk5M+pQ4EGElRAbFwH1" +
"QEGhRgMfCnL0ucikaiYkZXwEYrHjYo0nnp5UCdJAAZspF1twmNvihB4xRnIwrJTh" +
"Rp48IP5loaJgVKQPdjTgGPJgCQgSA++OAcWAyKsVsh7gyfBgYBZMEsa+6aMhRQg8" +
"kBduQqVgQpMPdPakSHFR4AsjZC0dYRgQACH5BAUAAP8ALAAAAAAQABAAAAjRAP8J" +
"FFgBAQIdP2wMXPhPCYSHECG0GcaQBi9/GDNirNBuIA8xkjSNuEDywoiTuzYIpKLg" +
"0RxFCWLGhAIlgaN/D4womNCE0UJHJSzkMpHphQIjwFgw/KcrFBo0hCTUghHBy1JG" +
"dTx4CNVAAVUiS9cxmTGjRxlfO80spUHDEAoUE17AmLDDysIuYeYwqnHpH8sJDG5p" +
"CFGJSwsVT7pIEtg1ghkiGnCEWHODDIctWwbOreslxaEHUW40usEwAgMrGlIMipKB" +
"y5qlAp1o+DKExBeGAQEAIfkEBQAA/wAsAAAAABAAEAAACNcA/wkUqETLPwQIByr8" +
"xwPQjwUQD+rAsBATHysIYiQZCNHfwBWtFCi4pMjBiH9q/PhLkkBghxdjJtj5Z+Of" +
"FAwmYzj4xyaIDxEsmih0kQCDUSzw+CiYsGPhP1V/irhIlaXBvwmWnFowtcHElTIv" +
"jPD44FRIHEIWrvgESnahEDklwMygIoFNEydHFFJAcepTjzr/YEQwQ8QLjgfeOHRB" +
"QoMCBYFAcryRsQdHCBKNODzZEobDwCmL6KwIcWjJDTIqtiBZaObI6ChcQNxw48ap" +
"wBVDSGRYs2ZhQAA7";
}
Here is sample code. Basically, this is how it works: unlike other image formats, an animated GIF has a set of ImageData objects (each one like a frame in an animation). You render this image data on a Canvas with the delay that you want.
http://www.java2s.com/Code/Java/SWT-JFace-Eclipse/DisplayananimatedGIF.htm
This is also a working example based on the Eclipse article about SWT. I was trying to make a slideshow / animated GIF dialog from images located in a folder; it is posted here because it could be useful for someone.
public class GifDialog extends Dialog {
Shell dialog;
private Canvas canvas;
int numberImage = 1;
private volatile boolean running = true;
final List<Image> imageCollection = new ArrayList<Image>();
GifDialog(Shell parent) {
super(parent);
}
public String open() {
Shell parent = getParent();
this.func();
dialog = new Shell(parent, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL);
dialog.setSize(600, 400);
dialog.setText("Show Begins!!!");
Monitor primary = dialog.getDisplay().getPrimaryMonitor();
Rectangle bounds = primary.getBounds();
Rectangle rect = dialog.getBounds();
int x = bounds.x + (bounds.width - rect.width) / 2;
int y = bounds.y + (bounds.height - rect.height) / 2;
dialog.setLocation(x, y);
dialog.setLayout(new FillLayout());
final Canvas canvas = new Canvas(dialog, SWT.NONE);
final Image image = new Image(dialog.getDisplay(), imageCollection.get(
0).getImageData());
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener() {
public void paintControl(PaintEvent event) {
event.gc.drawImage(image, 0, 0);
}
});
Thread thread = new Thread() {
public void run() {
while (running) {
dialog.getDisplay().asyncExec(new Runnable() {
public void run() {
numberImage = numberImage == imageCollection.size() - 1
? 0 : numberImage + 1;
ImageData nextFrameData = imageCollection.get(
numberImage).getImageData();
Image frameImage = new Image(dialog.getDisplay(),
nextFrameData);
gc.drawImage(frameImage, nextFrameData.x,
nextFrameData.y);
frameImage.dispose();
canvas.redraw();
if (numberImage == 0)
running = false;
try {
Thread.sleep(200);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
}
}
};
dialog.open();
thread.start();
Display display = parent.getDisplay();
while (!dialog.isDisposed()) {
if (!display.readAndDispatch())
display.sleep();
}
return "After Dialog";
}
public void func() {
File path = new File("../folder");
File[] files = path.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].isFile()) { // this line weeds out other
// directories/folders
try {
ImageData imageData = new ImageData(
new ByteArrayInputStream(loadImage(files[i])));
final Image image = new Image(Display.getDefault(),
imageData);
imageCollection.add(image);
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
public byte[] loadImage(File file) throws IOException {
BufferedImage image = ImageIO.read(file);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ImageIO.write(image, "jpg", bos);
return bos.toByteArray();
}
public Canvas getCanvas() {
return canvas;
}
public void setCanvas(Canvas canvas) {
this.canvas = canvas;
}
}
Note: none of the examples that rely on SWT's ImageLoader will work on GTK Linux SWT, as there is currently a bug that hard-codes the maximum number of frames to 32 and sets the delay between frames incorrectly.
See GTK ImageLoader.java
// Fix the number of GIF frames as GdkPixbufAnimation does not provide an API to
// determine number of frames.
int num_frames = 32;