Android Camera2: acquire frames from camera preview - java

I'm a beginner in Android and I have to create an app that charts, in real time, the camera images processed in RGB, using Camera2.
I've created an activity where I plot the data and a fragment for Camera2 based on Google's example. The camera part works, but I have problems with the ImageReader used to process the frames. Inside openCamera, I've set up a listener for the ImageReader and tried to create a background thread to acquire the frame data.
private void openCamera(int width, int height) {
// another part of code
ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Thread mythread = new Thread(new Runnable() {
@Override
public void run(){
readImage = mImageReader.acquireLatestImage();
if (readImage == null){
Log.e(TAG, "ERROR");
}
float[] val = caricadato(readImage);
try {
Thread.sleep(100); //600
} catch (InterruptedException e) {
//manage error...
}
}
});
mythread.start();
}
};
mImageReader = ImageReader.newInstance(176, 144,
35 /* ImageFormat.YUV_420_888 */, 1 /* maxImages */);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
//....continue
}
The "carica dato" method extracts the green channel and makes it the mean (data to be plotted).
private float[] caricadato(Image image){
Log.i(TAG, "The onImageAvailable thread id: " + Thread.currentThread().getId());
float[] v = yuv2rgb(image);
float media = valore_medio(v);
dati_realtime[count]= media;
Log.i(TAG,"dato media"+ media);
Log.i(TAG,"dato real time "+ dati_realtime[count]);
Log.i(TAG,"VALORE CONTATORE "+count);
count++;
image.close();
return dati_realtime;
}
private float[] yuv2rgb(Image image){
ByteBuffer buffer0 = image.getPlanes()[0].getBuffer();
byte[] Y1 = new byte[buffer0.remaining()];
buffer0.get(Y1);
ByteBuffer buffer1 = image.getPlanes()[1].getBuffer();
byte[] U1 = new byte[buffer1.remaining()];
buffer1.get(U1);
ByteBuffer buffer2 = image.getPlanes()[2].getBuffer();
byte[] V1 = new byte[buffer2.remaining()];
buffer2.get(V1);
int Width = image.getWidth();
int Height = image.getHeight();
float[]verdi = new float[image.getHeight()*image.getWidth()];
int contatore = 0;
for(int i = 0; i<Height-1; i++){
for (int j = 0; j<Width; j++){
int Y = Y1[i*Width+j]&0xFF;
int U = U1[(i/2)*(Width/2)+j/2]&0xFF;
int V = V1[(i/2)*(Width/2)+j/2]&0xFF;
U = U-128;
V = V-128;
float R,G,B;
R = (float)(Y + 1.402 * V);
G = (float)(Y - 0.34414 * U - 0.71414 * V);
B = (float) (Y + 1.772 * U);
verdi[contatore]= (byte)G;
contatore++;
}
}
return verdi;
}
The error at startup and on resume is often:
E/AndroidRuntime: FATAL EXCEPTION: Thread-5 Process: com.example.francesca.beatandgo, PID: 22610
java.lang.IllegalStateException: maxImages (1) has already been acquired, call #close before acquiring more.
at android.media.ImageReader.acquireNextImage(ImageReader.java:398)
at android.media.ImageReader.acquireLatestImage(ImageReader.java:283)
at com.example.francesca.beatandgo.camera.Camera2VideoFragment$3$1.run(Camera2VideoFragment.java:506)
at java.lang.Thread.run(Thread.java:776)
Camera2VideoFragment.java:506 is readImage = mImageReader.acquireLatestImage(); in openCamera.
How can I resolve this problem? And how can I pass the data to the main thread in my Activity?
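One common fix, sketched under the assumptions that the fragment keeps its mBackgroundHandler and the valore_medio/yuv2rgb helpers above (plotData is a hypothetical callback into the Activity): do the acquire and close directly in onImageAvailable, which already runs on the background handler's thread, so every Image is closed before the next callback fires, and post each computed mean to the main thread through a main-looper Handler.
private final Handler mMainHandler = new Handler(Looper.getMainLooper());
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
        new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // Already on mBackgroundHandler's thread: no extra Thread needed.
        Image image = reader.acquireLatestImage();
        if (image == null) {
            return; // nothing pending, wait for the next callback
        }
        try {
            final float media = valore_medio(yuv2rgb(image));
            mMainHandler.post(new Runnable() {
                @Override
                public void run() {
                    plotData(media); // hypothetical: append the mean to the chart
                }
            });
        } finally {
            image.close(); // with maxImages == 1, a missed close makes the next acquire throw
        }
    }
};
Raising maxImages in ImageReader.newInstance only postpones the IllegalStateException; the listener still has to close every image it acquires.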

Related

Signal 7 (SIGBUS), code 1 (BUS_ADRALN), fault addr

I'm trying to do stitching on Android. I used the SURF algorithm as the feature finder and got an error like "try enable OPENCV_ENABLED_NONFREE". I use C++ native code in Android Studio and do the stitching in the background. I solved the SURF error, but now, when I test the application on a real device, I get the SIGBUS error above.
I did some research on another forum, and they say the problem may be caused by a function that doesn't return the value it's supposed to return. Is there anyone who can help me, please? This is the code:
private Handler handler = new Handler(new Handler.Callback() {
@Override
public boolean handleMessage(Message message) {
switch (message.what) {
case HANDLER_START_STITCHING :
{
new Thread()
{
public void run()
{
AsyncTask.execute(new Runnable() {
@Override
public void run() {
String[] source=getDirectoryFilelist("null");
final File resultDir = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + File.separator + "viewwer");
// if (!resultDir.exists())
// resultDir.mkdir();
final String stitchingResultImagePath = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath()) +"/result.jpg"; // + (ANGLE-90) + ".jpg";
if( NativePanorama.jniStitching(source, stitchingResultImagePath, STITCH_IMAGE_SCALE) == 0 )
{
handler.sendMessage(handler.obtainMessage(HANDLER_SET_STITCHING_BUTTON_TEXT,"Stitching success"));
File image = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath());
File result_90 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result90.jpg");
File result_180 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result180.jpg");
File result_270 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result270.jpg");
File result_360 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result360.jpg");
Log.d("GESTION_STITCHING", result_180.toString());
/*if (ANGLE == 450) {
handler.sendMessage(handler.obtainMessage(HANDLER_FINISH_STITCHING,"Stitching success"));
}*/
if (image.exists()) {
File[] files = image.listFiles();
for (int i=0;i<files.length; i++) {
if (files[i].compareTo(result_90) == 0 || files[i].compareTo(result_180) == 0 || files[i].compareTo(result_270) == 0 || files[i].compareTo(result_360) == 0) {
} else {
files[i].delete();
}
}
}
}
else
{
handler.sendMessage(handler.obtainMessage(HANDLER_SET_STITCHING_BUTTON_TEXT,"Stitching error"));
}
}
});
}
}.start();
break;
}
and the following is the native C++ code:
JNIEXPORT jint JNICALL Java_com_priscilla_viewwer_utils_NativePanorama_jniStitching(JNIEnv *env, jclass clazz, jobjectArray source, jstring result, jdouble scale) {
clock_t beginTime, endTime;
double timeSpent;
beginTime = clock();
//init jni call java method
int i = 0;
bool try_use_gpu = true;
vector<Mat> imgs;
Mat img;
Mat img_scaled;
Mat pano;
Mat pano_tocut;
Mat gray;
const char* result_name = env->GetStringUTFChars(result, JNI_FALSE); //convert result
LOGE("result_name=%s",result_name);
LOGE("scale=%f",scale);
int imgCount = env->GetArrayLength(source); //img count
LOGE("source imgCount=%d",imgCount);
for(i=0;i<imgCount;i++)
{
jstring jsource = (jstring)(env->GetObjectArrayElement(source, i));
const char* source_name = env->GetStringUTFChars(jsource, JNI_FALSE); //convert jsource
LOGE("Add index %d source_name=:%s", i, source_name);
img=imread(source_name);
Size dsize = Size((int)(img.cols*scale),(int)(img.rows*scale));
img_scaled = Mat(dsize,CV_32S);
resize(img,img_scaled,dsize);
imgs.push_back(img_scaled);
env->ReleaseStringUTFChars(jsource, source_name); //release convert jsource
}
img.release();
pano = stitchingImages(imgs);
for(i=0;i<imgs.size();i++)
{
imgs[i].release();
}
//cut black edges
//LOGE("stitching success,cutting black....");
pano_tocut = pano;
cvtColor(pano_tocut, gray, CV_BGR2GRAY);
Rect startROI(0,0,gray.cols,gray.rows); // start as the source image - ROI is the complete SRC-Image
cropLargestPossibleROI(gray,pano_tocut,startROI);
gray.release();
imwrite(result_name, pano_tocut);
pano.release();
pano_tocut.release();
env->ReleaseStringUTFChars(result, result_name); //release convert result
endTime = clock();
timeSpent = (double)(endTime - beginTime) / CLOCKS_PER_SEC;
LOGE("success,total cost time %f seconds",timeSpent);
// env->CallVoidMethod(clazz, javaMethodRef, timeSpent);
return 0;
}
I came across this issue too, and I found it was a strict-aliasing violation caused by the cast.
Problem:
uint32_t i32 = *((uint32_t*)m_data);
Solution:
uint32_t i32 = 0;
char* p = (char*)&i32;
for (int i = 0; i < 4; i++)
{
p[i] = m_data[i];
}

Audio merging using OpenSL ES Android

I am trying to record my vocals and then merge them with an audio file using the OpenSL ES library. I found a GitHub sample called Native-Audio. It merges the two audio tracks, but in the final output the background audio file plays much faster than its actual rate.
Please use headphones to notice the difference.
Sample links: Before and After
Also, it uses files from the assets folder only. How can I manually select MP3 files from the file manager?
private void mixAudio(){
try {
if (!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) ||
!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED))
{
// Show rationale and request permission.
ActivityCompat.requestPermissions(this,
new String[]{android.Manifest.permission.READ_EXTERNAL_STORAGE, android.Manifest.permission.WRITE_EXTERNAL_STORAGE},
1000);
}
else {
buttonMix.setEnabled(false);
buttonMix.setText("MIXING....");
textViewMixPath.setText("");
buttonPlay.setEnabled(false);
buttonRecord.setEnabled(false);
buttonStart.setEnabled(false);
listView.setEnabled(false);
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try{
//final File file = new File(getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + "mix.wav");
//String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
//File file = new File(baseDir + "/mix.wav");
String path = Environment.getExternalStorageDirectory().getPath() + "/VocalRecorder";
File fileParent = new File(path);
if (!fileParent.exists()){
fileParent.mkdir();
}
final File file = new File(fileParent.getPath() + "/mix.wav");
//String author = getApplicationContext().getPackageName() + ".provider";
//Uri videoUri = FileProvider.get(this, author, mediaFile);
//final File file = new File(getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + "mix.wav");
//MediaMuxer muxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
String beat = beats[selectedBeat];
//beat = beat.replace(".wav", ".mp3");
AssetFileDescriptor afd = getAssets().openFd(beat);
MediaCodec codec = null;
//ByteBuffer outputBuffer;
//short[] data; // data for the AudioTrack playback
//int outputBufferIndex = -1;
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
final long duration = Long.parseLong(durationStr);
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
// right now I am pointing to a URI but I have tested that both will
// play the media file using MediaPlayer
int sampleRate = 0;
int numChannels = 0;
int dstIndex = -1;
int numTracks = extractor.getTrackCount(); //This says 1
for (int i = 0; i < numTracks; ++i) { // so this will just run once
MediaFormat format = extractor.getTrackFormat(i); // getting info so it looks good so far
String mime = format.getString(MediaFormat.KEY_MIME); // "audio/mpeg"
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
codec = MediaCodec.createDecoderByType(mime);
codec.configure(format, null, null, 0);
//format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AMR_NB);
//dstIndex = muxer.addTrack(format);
//writer.setFrameRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
//writer.setSamplesPerFrame(format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
//writer.setBitsPerSample(16);
sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
break;
}
}
// Calculate the number of frames required for specified duration
long numFrames = (long)(duration * sampleRate/1000);
// Create a wav file with the name specified as the first argument
WavFile wavFile = WavFile.newWavFile(file, numChannels, numFrames, 16, sampleRate);
if (codec == null) {
throw new IllegalArgumentException("No decoder for file format");
}
//ByteBuffer[] inputBuffers = decoder.getInputBuffers();
//ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
/*
Boolean eosReceived = false;
while (!eosReceived) {
int inIndex = decoder.dequeueInputBuffer(1000);
if (inIndex >= 0) {
ByteBuffer buffer = decoder.getInputBuffer(inIndex);
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// We shouldn't stop the playback at this point, just pass the EOS
// flag to mDecoder, we will get it again from the
// dequeueOutputBuffer
Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
int outIndex = decoder.dequeueOutputBuffer(info, 1000);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
//outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = decoder.getOutputFormat();
Log.d("DecodeActivity", "New format " + format);
//audioTrack.setPlaybackRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer outBuffer = decoder.getOutputBuffer(outIndex);
Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outBuffer);
final byte[] chunk = new byte[info.size];
outBuffer.get(chunk); // Read the buffer all at once
outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
//audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
decoder.releaseOutputBuffer(outIndex, false);
break;
}
// All decoded frames have been rendered, we can stop playing now
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}
}
}
*/
short recordedData[] = recordedData();
int recordMixStartIndex = -1;
//muxer.start();
codec.start();
Boolean sawInputEOS = false;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo infoMux = new MediaCodec.BufferInfo();
int count = 0;
while (!sawInputEOS) {
int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_US);
Log.i(LOG_TAG, "inputBufIndex : " + inputBufIndex);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codec.getInputBuffer(inputBufIndex);
int sampleSize = extractor.readSampleData(dstBuf, 0);
Log.i(LOG_TAG, "sampleSize : " + sampleSize);
long presentationTimeUs = 0;
if (sampleSize < 0) {
Log.i(LOG_TAG, "Saw input end of stream!");
sawInputEOS = true;
sampleSize = 0;
} else {
presentationTimeUs = extractor.getSampleTime();
Log.i(LOG_TAG, "presentationTimeUs " + presentationTimeUs);
}
codec.queueInputBuffer(inputBufIndex,
0, //offset
sampleSize,
presentationTimeUs,
sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
if (!sawInputEOS) {
Log.i(LOG_TAG, "extractor.advance()");
extractor.advance();
}
}
final int res = codec.dequeueOutputBuffer(info, TIMEOUT_US);
if (res >= 0) {
int outputBufIndex = res;
ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
//final byte[] chunk = new byte[info.size];
//buf.get(chunk); // Read the buffer all at once
short[] shortArray = new short[info.size/2];
buf.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shortArray);
buf.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
if (shortArray.length > 0) {
//mAudioTrack.write(chunk, 0, chunk.length);
//infoMux.presentationTimeUs = info.presentationTimeUs;
//infoMux.flags = info.flags;
//muxer.writeSampleData(dstIndex, ByteBuffer.wrap(chunk),
// infoMux);
long []longData = new long[shortArray.length];
// Merge data with vocal
// Calculate the time
final long bufferTimer = info.presentationTimeUs/1000;
int vocalCount = 0;
for (int i = 0; i < shortArray.length; i ++) {
//writer.writeShortLittle(shortArray[i]);
long offsetTime = i*1000/(sampleRate*2); // 2 channels
Boolean mixed = false;
if ((offsetTime + bufferTimer > recordStartTime) && (offsetTime + bufferTimer <= recordStopTime + 500)){
if (recordMixStartIndex == -1){
recordMixStartIndex = 0;
}
if (recordMixStartIndex < recordedData.length){
//Log.i("TAG", "############ mix record data: " + recordMixStartIndex);
longData[i] = TPMixSamples((int)(recordedData[recordMixStartIndex]), (int)shortArray[i]/3);
if (vocalCount >= 3) {
recordMixStartIndex++;
vocalCount = 0;
}
else{
vocalCount ++;
}
mixed = true;
}
}
else {
// All done, set sawInputEOS to stop mixing
if (bufferTimer > recordStopTime + 500){
sawInputEOS = true;
}
}
if (!mixed) {
longData[i] = shortArray[i];
}
}
Log.i("TAG", "############ write frames: " + longData.length/2);
wavFile.writeFrames(longData, longData.length/2);
count ++;
if (count % 5 == 0){
runOnUiThread(new Runnable() {
@Override
public void run() {
long percent = bufferTimer*100/duration;
buttonMix.setText("MIXING..." + percent + "%");
}
});
}
}
codec.releaseOutputBuffer(outputBufIndex, false /* render */);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawInputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
//codecOutputBuffers = codec.getOutputBuffers();
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
final MediaFormat oformat = codec.getOutputFormat();
Log.d(LOG_TAG, "Output format has changed to " + oformat);
//mAudioTrack.setPlaybackRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
}
}
// Close the wavFile
wavFile.close();
// muxer.stop();
// muxer.release();
codec.stop();
codec.release();
extractor.release();
runOnUiThread(new Runnable() {
@Override
public void run() {
buttonMix.setText("MIX DONE");
buttonPlay.setEnabled(true);
buttonRecord.setEnabled(true);
textViewMixPath.setText(file.getPath());
buttonStart.setEnabled(true);
listView.setEnabled(true);
}
});
}
catch (Exception e){
}
}
});
thread.start();
}
}
catch (Exception e){
e.printStackTrace();
}
}
private final int INT16_MIN = - 32768;
private final int INT16_MAX = 32767;
private long TPMixSamples(int a, int b) {
if (a > INT16_MAX) {a = INT16_MAX;}
if (a < INT16_MIN) {a = INT16_MIN;}
return
// If both samples are negative, mixed signal must have an amplitude between the lesser of A and B, and the minimum permissible negative amplitude
a < 0 && b < 0 ?
((int)a + (int)b) - (((int)a * (int)b)/INT16_MIN) :
// If both samples are positive, mixed signal must have an amplitude between the greater of A and B, and the maximum permissible positive amplitude
( a > 0 && b > 0 ?
((int)a + (int)b) - (((int)a * (int)b)/INT16_MAX)
// If samples are on opposite sides of the 0-crossing, mixed signal should reflect that samples cancel each other out somewhat
:
a + b);
}
/** Native methods, implemented in jni folder */
public static native void createEngine();
public static native void createBufferQueueAudioPlayer(int sampleRate, int samplesPerBuf);
/////
public static native boolean createAssetAudioPlayer(AssetManager assetManager, String filename);
// true == PLAYING, false == PAUSED
public static native void setPlayingAssetAudioPlayer(boolean isPlaying);
public static native int getDurationAssetAudioPlayer();
public static native int getCurrentPositionAssetAudioPlayer();
//////
public static native boolean createUriAudioPlayer(String uri);
public static native void setPlayingUriAudioPlayer(boolean isPlaying);
public static native void setLoopingUriAudioPlayer(boolean isLooping);
public static native void setChannelMuteUriAudioPlayer(int chan, boolean mute);
public static native void setChannelSoloUriAudioPlayer(int chan, boolean solo);
public static native int getNumChannelsUriAudioPlayer();
public static native void setVolumeUriAudioPlayer(int millibel);
public static native void setMuteUriAudioPlayer(boolean mute);
public static native void enableStereoPositionUriAudioPlayer(boolean enable);
public static native void setStereoPositionUriAudioPlayer(int permille);
public static native boolean selectClip(int which, int count);
public static native void stopClip();
public static native boolean enableReverb(boolean enabled);
public static native boolean createAudioRecorder();
public static native void startRecording();
public static native void stopRecording();
public static native void pauseRecording();
public static native void resumeRecording();
public static native short[] recordedData();
public static native double recordedDuration();
public static native void shutdown();
/** Load jni .so on initialization */
static {
System.loadLibrary("native-audio-jni");
}
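On the question of picking MP3s outside the assets folder, one standard approach (a sketch, not part of the Native-Audio sample; REQUEST_PICK_AUDIO and pickMp3 are hypothetical names) is the Storage Access Framework: let the user pick the file with ACTION_OPEN_DOCUMENT and hand the resulting file descriptor to MediaExtractor in place of the AssetFileDescriptor used above.
private static final int REQUEST_PICK_AUDIO = 42; // arbitrary request code
private void pickMp3() {
    // Browse any document provider (Downloads, SD card, cloud storage, ...).
    Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT);
    intent.addCategory(Intent.CATEGORY_OPENABLE);
    intent.setType("audio/mpeg");
    startActivityForResult(intent, REQUEST_PICK_AUDIO);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_PICK_AUDIO && resultCode == RESULT_OK && data != null) {
        try {
            ParcelFileDescriptor pfd =
                    getContentResolver().openFileDescriptor(data.getData(), "r");
            MediaExtractor extractor = new MediaExtractor();
            extractor.setDataSource(pfd.getFileDescriptor()); // replaces the assets AFD
            // ...continue with the same track-selection and decode loop as above
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}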

How to record continuously with Android's AudioRecord?

I'm trying to record with AudioRecord. The problem, I think, is that the read method is called again too quickly, so the audioBuffer ends up with overlapped data. How can I process the buffer as soon as it is full, and do so continuously?
Thank you for every answer.
public class Record {
/**
* TAG for "Android monitor" while debugging
*/
private static final String LOG_TAG = Record.class.getSimpleName();
/**
* calling activity needed for callback functions.
*/
private MainActivity mActivity;
// ???
private final int SAMPLE_RATE = 48000; // Samples per s
private final int BAUD_RATE = 8000; // Symbols per s
private final double T_SYMBOL = 1/(double) BAUD_RATE; // Symbol period in s
private final int NUM_SAMPLES = (int) (T_SYMBOL * SAMPLE_RATE); // Number of samples per symbol
private final int MAX_FRAMES = 3; // Maximum possible number of frames receiving in one sequence
private final int SYMBOLS_PER_FRAME = 10 * 2; // 10 from Uart and 2 because Manchester
private final int MIN_BUFFER_SIZE = MAX_FRAMES * SYMBOLS_PER_FRAME * NUM_SAMPLES;
private final short POS_THRESHOLD = 25000;
private final short NEG_THRESHOLD = -POS_THRESHOLD;
// ???
/**
* Record handling
*
@param activity the calling activity
*/
public Record(MainActivity activity) {
mActivity = activity;
}
// ========================================================================================== //
/**
* Record the sensor data from the MIC Channel.
*/
public void recordData(){
new Thread(new Runnable() {
@Override
public void run() {
Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
// Buffer size in bytes
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize == AudioRecord.ERROR ||
bufferSize == AudioRecord.ERROR_BAD_VALUE ||
bufferSize < MIN_BUFFER_SIZE) {
bufferSize = MIN_BUFFER_SIZE;
}
short[] audioBuffer = new short[bufferSize / 2];
AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
if (record.getState() != AudioRecord.STATE_INITIALIZED) {
Log.e(LOG_TAG, mActivity.getString(R.string.log_ar_not_init));
return;
}
record.startRecording();
Log.i(LOG_TAG, mActivity.getString(R.string.log_start_recording));
int firstSample = 0;
boolean firstSampleFound = false;
final short[] sample = new short[MIN_BUFFER_SIZE];
short[] tempBuffer = new short[2 * bufferSize];
while (mActivity.getSupplySensor()) {
record.read(audioBuffer, 0, audioBuffer.length);
// Check if data are in the audio buffer (find first sample).
// When yes, copy the audio buffer in the first part of the temp buffer.
// Read the audio Buffer again.
// Copy the audio buffer in the second part of the temp buffer.
if(!firstSampleFound) {
for (int i = 0; i < audioBuffer.length; i++) {
if ((audioBuffer[i] > POS_THRESHOLD) && !firstSampleFound) {
//|| audioBuffer[i] < NEG_THRESHOLD
firstSample = i;
firstSampleFound = true;
arraycopy(audioBuffer, 0, tempBuffer, 0, audioBuffer.length);
}
}
} else {
arraycopy(audioBuffer, 0, tempBuffer, audioBuffer.length, audioBuffer.length);
arraycopy(tempBuffer, firstSample, sample, 0, MIN_BUFFER_SIZE);
firstSampleFound = false;
firstSample = 0;
// Begin: Used for debugging
for (int i = 0; i < sample.length; i++) {
String el = Integer.toString(i);
String elVal = Integer.toString(sample[i]);
Log.e("sample", "not zero " + el + " " + elVal);
}
// Clear audio buffer
for(int i = 0; i < audioBuffer.length; i++){
audioBuffer[i] = 0;
}
// End: Used for debugging
if(mActivity.getSupplySensor()) {
mActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
mActivity.processData(sample);
}
});
}
}
}
record.stop();
record.release();
Log.i(LOG_TAG, mActivity.getString(R.string.log_stop_recording));
/*
// Begin: Used for debugging
for (int i = 0; i < sample.length; i++) {
String el = Integer.toString(i);
String elVal = Integer.toString(sample[i]);
Log.e("sample", "not zero " + el + " " + elVal);
}
// End: Used for debugging
*/
}
}).start();
}
}
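For what it's worth, a common pattern for gapless capture, sketched with the same record object and loop condition as above (chunks is a hypothetical hand-off queue): check how many shorts read() actually delivered, snapshot the buffer before reusing it, and let a separate consumer thread drain the copies, so recording never waits on processing.
// Assumes java.util.Arrays and java.util.concurrent.LinkedBlockingQueue.
final BlockingQueue<short[]> chunks = new LinkedBlockingQueue<short[]>();
short[] audioBuffer = new short[bufferSize / 2];
while (mActivity.getSupplySensor()) {
    // In blocking mode read() returns only after filling the requested range,
    // so consecutive reads on this thread never overlap each other.
    int read = record.read(audioBuffer, 0, audioBuffer.length);
    if (read > 0) {
        // Copy before the next read() overwrites audioBuffer.
        chunks.offer(Arrays.copyOf(audioBuffer, read));
    }
}
// A worker thread consumes the chunks in order, off the recording thread:
// short[] sample = chunks.take(); mActivity.processData(sample);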

Custom OpenVR driver, with jittery android rotation

I'm working on a simple (I thought) OpenVR driver that sends the compositor's output to my Android phone for display with Google Cardboard. I've got the basics working, but the rotation data my driver receives from the Android device is very noisy: with the phone sitting still on my desk it jitters a lot.
The floats are being sent over in network byte order.
Here is the code receiving the rotation data:
float BytesToFloat(unsigned char* buffer, int start)
{
float f = (buffer[start] << 24) | (buffer[start + 1] << 16) | (buffer[start + 2] << 8) | buffer[start + 3];
return f;
}
void MobileDeviceReceiver::PoseThread(std::function<void(MobileVector*)> PoseUpdateCallback)
{
sockaddr_in client;
int inLen,clientStructLen = sizeof(client);
MobileVector currentPose;
DriverLog("started pose tracking thread %d\n",m_isActive);
bool errorLastCall = false;
char pchBuffer[65535];
while (m_isActive)
{
if ((inLen = recvfrom(m_socket, pchBuffer, 65535, 0, (sockaddr*)&client, &clientStructLen)) == SOCKET_ERROR)
{
if (errorLastCall == false)
{
DriverLog("Error receiving data: %d\n", WSAGetLastError());
}
errorLastCall = true;
}
else {
errorLastCall = false;
}
currentPose.x = floorf(BytesToFloat((unsigned char*)pchBuffer, 0)*10000)/10000;
currentPose.y = 0;//BytesToFloat((unsigned char*)pchBuffer, 4);
currentPose.z = 0;//BytesToFloat((unsigned char*)pchBuffer, 8);
PoseUpdateCallback(&currentPose);
}
}
And the code sending it from the phone:
class Task implements Runnable
{
public Queue<DatagramPacket> packets = new LinkedList<DatagramPacket>();
private boolean isActive = true;
@Override
public void run() {
try{
DatagramSocket socket = new DatagramSocket();
while(isActive)
{
if(packets.size() > 0)
{
socket.send(packets.remove());
}
}
}catch(Exception e)
{
e.printStackTrace();
}
}
public void Kill()
{
isActive = false;
}
}
ByteBuffer buffer = ByteBuffer.allocate(12);
protected float ALPHA = 0.05f;
private float[] lowPassFilter(float[] input,float[] output)
{
if(output == null) return input;
for ( int i=0; i<input.length; i++ ) {
output[i] = output[i] + ALPHA * (input[i] - output[i]);
}
return output;
}
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
if(sensorEvent.sensor == sRotation)
{
float x,y,z;
x = (float) Math.toRadians(sensorEvent.values[0]);
y = (float) Math.toRadians(sensorEvent.values[1]);
z = (float) Math.toRadians(sensorEvent.values[2]);
float[] values = lowPassFilter(new float[]{x,y,z},new float[3]);
buffer.order(ByteOrder.BIG_ENDIAN);
buffer.putFloat(values[0]);
buffer.putFloat(values[1]);
buffer.putFloat(values[2]);
try {
DatagramPacket packet = new DatagramPacket(buffer.array(), 12, InetAddress.getByName(IP), 8888);
if(task != null) {
task.packets.add(packet);
}
}catch(Exception e)
{
e.printStackTrace();
}
buffer.clear();
}
}
Thanks in advance
For future reference, I was doing two things wrong:
I was sending the float in network byte order and not rearranging it to native byte order on the local machine.
Bit shifts don't work for floats (assigning the assembled integer to a float converts the value instead of reinterpreting the bits), so I copied the raw bytes with memcpy instead.
After changing these two things it all worked perfectly.
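For comparison, a bit-correct BytesToFloat on the Java side would assemble the big-endian int and then reinterpret its bits with Float.intBitsToFloat (the C++ equivalent of a memcpy into a float); this is a sketch, not code from the driver:
// Decode one IEEE-754 float sent in network (big-endian) byte order.
static float bytesToFloat(byte[] buf, int start) {
    int bits = ((buf[start] & 0xFF) << 24)
            | ((buf[start + 1] & 0xFF) << 16)
            | ((buf[start + 2] & 0xFF) << 8)
            | (buf[start + 3] & 0xFF);
    return Float.intBitsToFloat(bits); // reinterpret the bits, don't convert the int value
}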

JAVA SWT Animated GIF

I need to add an animated GIF, in the form of a spinner, to an SWT widget such as a Label. This Label will be disabled by default (labelSpinner.setEnabled(false)). When I start some long-running operation, the Label with the spinner will be shown (labelSpinner.setEnabled(true)) in the right corner of the window.
Normally an Image can be added to a Label with labelSpinner.setImage(arg0).
If I add this SPINNER.GIF the normal way into the Label, it won't animate; it's only a static Image.
Does anybody know how to add an animated GIF (for example a spinner) into a widget of an SWT-based Java SE app? I browsed many examples, but many of them were useless, and the good ones were too complicated.
I would like to do it very simply. Can somebody help?
Did you try the Eclipse article about SWT Images?
This part will load the image and display it in a Canvas:
ImageLoader loader = new ImageLoader();
loader.load(getClass().getResourceAsStream("Idea_SWT_Animation.gif"));
Canvas canvas = new Canvas(shell,SWT.NONE);
image = new Image(display,loader.data[0]);
int imageNumber;
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener(){
public void paintControl(PaintEvent event){
event.gc.drawImage(image,0,0);
}
});
And this part updates the GIF (note that, to keep animating, this body has to run in a loop, as the complete examples further down do):
Thread thread = new Thread(){
public void run(){
long currentTime = System.currentTimeMillis();
int delayTime = loader.data[imageNumber].delayTime;
while(currentTime + delayTime * 10 > System.currentTimeMillis()){
// Wait till the delay time has passed
}
display.asyncExec(new Runnable(){
public void run(){
// Increase the variable holding the frame number
imageNumber = imageNumber == loader.data.length-1 ? 0 : imageNumber+1;
// Draw the new data onto the image
ImageData nextFrameData = loader.data[imageNumber];
Image frameImage = new Image(display,nextFrameData);
gc.drawImage(frameImage,nextFrameData.x,nextFrameData.y);
frameImage.dispose();
canvas.redraw();
}
});
}
};
shell.open();
thread.start();
After trying at least 3 different animated GIF examples, none of which worked, I started working on my own, based mainly on the answer above.
Here is a complete running example including:
a base64 decoder (courtesy of http://www.source-code.biz/base64coder/java/ )
a spinner GIF
a main method
Remove the main method, base64 methods, and image data, and you will have a working animated GIF canvas.
import java.io.IOException;
import java.io.InputStream;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.ImageLoader;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
public class AnimatedGif extends Canvas {
public static void main(String[] args) throws IOException {
byte[] d = decode(b64.toCharArray(), 0, b64.length());
Display display = new Display ();
Shell shell = new Shell(display);
shell.setBounds(0, 0, 100, 100);
AnimatedGif gif = new AnimatedGif(shell, SWT.NONE);
gif.setLocation(10,10);
gif.setSize(16, 16);
gif.load(new java.io.ByteArrayInputStream(d));
shell.open();
gif.animate();
while (!shell.isDisposed ()) {
if (!display.readAndDispatch ()) display.sleep ();
}
display.dispose ();
}
private final ImageLoader loader = new ImageLoader();
private int img = 0;
private volatile boolean animating = false;
private Thread animateThread;
public AnimatedGif(Composite parent, int style) {
super(parent, style);
}
public void load (InputStream resource) throws IOException {
loader.load(resource);
}
public void animate() {
if (animateThread == null) {
animateThread = createThread();
animateThread.setDaemon(true);
}
if (animateThread.isAlive())
return;
animateThread.start();
}
public void stop() {
animating = false;
if (animateThread != null)
try {
animateThread.join();
animateThread = null;
} catch (InterruptedException e) {
// do nothing
}
}
private Thread createThread() {
return new Thread() {
long currentTime = System.currentTimeMillis();
final Display display = getParent().getDisplay();
public void run() {
animating = true;
while(animating) {
img = (img == loader.data.length-1) ? 0 : img + 1;
int delayTime = Math.max(50, 10*loader.data[img].delayTime);
long now = System.currentTimeMillis();
long ms = Math.max(currentTime + delayTime - now, 5);
currentTime += delayTime;
try {
Thread.sleep(ms);
} catch(Exception e) {
return;
}
if (!display.isDisposed())
display.asyncExec(new Runnable() {
@Override
public void run() {
ImageData nextFrameData = loader.data[img];
Image frameImage = new Image(display, nextFrameData);
new GC(AnimatedGif.this).drawImage(frameImage, nextFrameData.x, nextFrameData.y);
frameImage.dispose();
//canvas.redraw();
}
});
}
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
new GC(AnimatedGif.this).fillRectangle(
0,
0,
getBounds().width,
getBounds().height);
}
});
}
};
}
private static final char[] map1 = new char[64];
static {
int i = 0;
for (char c = 'A'; c <= 'Z'; c++)
map1[i++] = c;
for (char c = 'a'; c <= 'z'; c++)
map1[i++] = c;
for (char c = '0'; c <= '9'; c++)
map1[i++] = c;
map1[i++] = '+';
map1[i++] = '/';
}
private static final byte[] map2 = new byte[128];
static {
for (int i = 0; i < map2.length; i++)
map2[i] = -1;
for (int i = 0; i < 64; i++)
map2[map1[i]] = (byte) i;
}
public static byte[] decode(char[] in, int iOff, int iLen) {
if (iLen % 4 != 0)
throw new IllegalArgumentException(
"Length of Base64 encoded input string is not a multiple of 4.");
while (iLen > 0 && in[iOff + iLen - 1] == '=')
iLen--;
int oLen = (iLen * 3) / 4;
byte[] out = new byte[oLen];
int ip = iOff;
int iEnd = iOff + iLen;
int op = 0;
while (ip < iEnd) {
int i0 = in[ip++];
int i1 = in[ip++];
int i2 = ip < iEnd ? in[ip++] : 'A';
int i3 = ip < iEnd ? in[ip++] : 'A';
if (i0 > 127 || i1 > 127 || i2 > 127 || i3 > 127)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int b0 = map2[i0];
int b1 = map2[i1];
int b2 = map2[i2];
int b3 = map2[i3];
if (b0 < 0 || b1 < 0 || b2 < 0 || b3 < 0)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int o0 = (b0 << 2) | (b1 >>> 4);
int o1 = ((b1 & 0xf) << 4) | (b2 >>> 2);
int o2 = ((b2 & 3) << 6) | b3;
out[op++] = (byte) o0;
if (op < oLen)
out[op++] = (byte) o1;
if (op < oLen)
out[op++] = (byte) o2;
}
return out;
}
static String b64 =
"R0lGODlhEAAQAPf/AMra5KO9zOzz9s3b5MDS3aS+zZOwwsrZ4+bt89jm7IKftuLr" +
"8ZCswH6bs9vn7qrD0+Ts8oqnvYKetomlu8PV4t7p79Pg6dzp79vm7q/F1aC6zdTi" +
"6bbL2XqYsdDf55SwxK/H1ajC0Yajudzp7q7E087c59Xi6niWr8XY4qW/0LnM2aO9" +
"zoyov7bK2djk7ICbtIOhtt7p7pq1yc3b5sDU38DR39vm7LDH1afA0Yqovebu85Gt" +
"wouovsvZ5oCdtunv9IaiuXyZscrb5qnC0Zezxdjj64WiuZ65y7vQ3N/p8OLq8avE" +
"1MTX4o6rwJaxxLnN29jm7a7F1dvm7Y2qwH2bs7zQ3Jizx9Df6djj7HmXr+Tt8rvO" +
"26/E1LrO3KC5zKW+z8/e577Q3oCctYShubTK2HyasZGtwHuYsdTg6enw89/p76zE" +
"04iku+Lr8LXJ2JKvw+Xt8tHf583b57vP3pu2ycjY45m0yNvp7qzE1bHI13qZsaK8" +
"zn2aspq0yODp8Nfi6svb5oaiuuDp76vE09Hf6ZWzxMfY4qnC087e5+rx9eLs8pSv" +
"xL/R3rPI2Nfj64ajuq7E1Y2pv7rQ2+Lp8J65yarC1JKvxLzR3nybsrzQ3unw9HmY" +
"sLvR3LPI17nO28rY5Iqlvc3c5urx9Iakus/f53iVrtPi6cnY436btNTi6tfi67PK" +
"1rHI2H6bspy3yJ63ydXi7JezxKjA0ZGuwNjk64CbtazE0Zezx8DR3NXi68TX4Nvn" +
"7e7097vP24CctH2atIqlvN7o7tPh6Nzn7JGvwK/F1MbX4dHg6MfY4b7S3c/e5n2b" +
"srjN2ezy9eDq78rb4svb473R2+Ts8cHV3r7R3eXu8rfN2JSvwo+tv9/p7pSxw6C7" +
"ytbj6cTX36zD1NHg5vL3+KnB0Onx9cPV35y4yLPJ1dnl7LnO2cHU3avD0bnP2sDV" +
"4pKvwdfk7O7z99Xi6XqXsNTi6N3o7sjY4ubv8s7c5qvE0env842svuLs8Z64yJm0" +
"xZq2x46svuLq8P///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQFAAD/ACwAAAAAEAAQ" +
"AAAIvQD/CRR4poEEf0q0DFz478SZFwqAxPC3AAEChqU6GAQSwcHEihcFntAoQUQE" +
"BgkcVKCI4IdAkiYZfCiScmXFfzBzMFhYk6K/jScZ/uspSEFMJ0KxpIxxAQgPBk5k" +
"CDVRBIoDDE8/yNAg1MIGmglkyvDy5cHCEh4smIC14d/WLyGGQGpBA4UQtBYsCDyy" +
"AseDByBaqKhhqAfaKwNTPIiyBAQZFUjqGmY4KEMGEG5UdAlDgYlQgQ9IdOJQhRPD" +
"gAAh+QQFAAD/ACwAAAAAEAAQAAAI1gD/CRR4IkiZBjx0DFwoMIueBj58EBGlRAtD" +
"h1TEKAikgJUOfxAWbsKk0QgbFjmEJEkiEIFAVBLGjMoxZUcfG1AEhvz3YiOoSAz/" +
"3VHzD8EPIxNYMPgQVMqFSRAWsGjyhoidoAmkVFjgb8eHPq80BMWS4I4fQZboaFiB" +
"I6gJFxhGXDiyJ4WsIZUWItrwxwYGF/9ShHjwIEOLGkwAkbJgAgvgwHjwLLnB4QmF" +
"HnI8oNmwYSCJDCDycPBUA8UMMHGuMHxAQvSTTBTqzBASVCCIFlWQ1KDAMCAAIfkE" +
"BQAA/wAsAAAAABAAEAAACMsA/wkUeKZDmSANfAxc+K8BlRe+FNRSMIbKMoYwFCjI" +
"CAOGEY1iqgyMMIHkhJIRgE14JImXEoEMdpiJyYBBpBxN5mjyB+GfEytWiNyyspCR" +
"ohE8f2jQ4GUpw38JLvBE8CVFChxDnkadOmTQoRCQtEqFoKNYlAdrMohNyiVDlBst" +
"5ix0lAAphAX/uNwgoyIMDQpMdJWAgrSNQC6NODyZQ4NJnVAWCFeYNLDFli6MDM3w" +
"gCZXgl0XGFaRVAPFZjQmHEl5KhCaaQ+EKDAMCAAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"2wD/CRRIJUiHLCdODFz47xEMCT5eNOBjMAjDKTkisBFh5J+CiK3KDFy06I2ZJix4" +
"TJggYowCPr8EHqEjg4iTDzuaTGExQYEVQP9WCN3j5cjCKXYuIfihZEiIEDhwMBSo" +
"KMYCLSSiHAqxZKoNB0kW/MvAZQmJDFP/jfi3AMEaEDcacZgqZSACEjfIcOjCZKEL" +
"DGr+IdDxz42KJ0hQCEFkQVUCB34WEP6nYssWGqfkxDH1B8MIfwv8DUQShsKnEoQ2" +
"FMEQI4lohhR6gLFg4q+DCmn/zQBzZUMqGwwDAgAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"uAD/CRTIgAcQBQ06DFz474iMDwVFSEh4gmGKFRoeMogAZGKHUgMHPcDxxYsMJxsl" +
"UhSYIcqDEF8yosyh8sy/BxmWPBjyYGFKCS/OkABBFBLDfxsPNujkhkyLFkchcpTA" +
"QYVVGkeLJHAQw1+VLkhqoDi6tasSTmFoGBJSYuHWCv4WaPlHAUWPEh4smMDyNi4C" +
"gUzu5t2g1QHcBQj+CsSrtwgUw34VD7xiAVZZvz+OCtyQAMMFQf4YBgQAIfkEBQAA" +
"/wAsAAAAABAAEAAACNYA/wkUeMTSDhZGXgxc+C9Fij10PjSZoADVJoYk8IRIoaHP" +
"GxaBJGDKMvBBBjwPZK14RYQBqDFi9AwkAWLJgyE4NNj5EGmUAiokQeTJcyNDJYY5" +
"jMQ80YKD0xYM/01ho6BBkCpPPD2pEfVgIB9lkGSqQYFJ1D45FPhoQBZFD0BRbQhh" +
"RYQHhToz5JBCxBBKEh2idPybAcaDhQ0msCSQcudfEn9KBAqJg8bEHxeLL6gRCEHL" +
"wCsbsNjAcKfCpH8Qov7b4ALDCD8LICD4N1u1iwuC/C34wTAgACH5BAUAAP8ALAAA" +
"AAAQABAAAAjNAP8JFMil2JAvGpwMXPiPC5cMUQal0GCFQQSGLRrdiPLgUAovVnZM" +
"gDGwyhYOZG6sCYFDAxEzERQ0ECipyxMVLTJAGqLhFoMJCqj8o1CD0ZwwcxaGHPni" +
"DAoUhmjQYPjvpwJfHWZoZUKBKkwYCsp4GFunK8NIEWDUCkIIDZpQuqjmAGZEJgUT" +
"uSyUcLSQUROgY8b8c5QACpQEiBErmvMoqEApu0ZIvkD5wghNksQsG3ihgr/PoD/z" +
"qsJwUhsIqFNDUEJV4IIfOhAg+MEwIAAh+QQFAAD/ACwAAAAAEAAQAAAI1wD/CRRI" +
"Yk0GEkNWDFz4T4WbGyC4RAmx4sgihki2qCBzY8khinQWTRlIIcyWJxwakQiBY4+M" +
"NzkeCaRAgQaSLhwyLMHhhYiZCDD+zejx6RQKJguPOGnCRgIVMGBKyBHC8N8HFiJ8" +
"BLligVAcRFV38DDyosMGExtMWajaZMK/BllSuSjyR1XVKRMU8DlhA4PfBC4WTsGq" +
"9cQ/BzFGOMAg5Z+Nf3YmjCkrsEISf37U/FOs6JICBa2CDPS3YMHAJDEQWOFThqE/" +
"HQj+lV7wA9CvqgJhx9aihGFAACH5BAUAAP8ALAAAAAAQABAAAAjAAP8JFMipCodO" +
"JB4MXPiPCYUwXVS4AZEhwyCGJXqgoIFEBRkQS6I8SDHwioeMhmqoaAHiwQMcK44I" +
"tGDhpJCNLSANCfFFg4x/G2CZqFli4YMvXmR8YJAgQZENFhj+87mUBwYHUIqYkCrD" +
"CQMeQC7EcJAAi1SvEUQoEOSvAlmpDCIAkdDA34K2bwcyyPGPbod/CO66dbo0Ld2B" +
"PwLjTRBXhN8TAxEgEDhWrt9SDCXfjQFEwYszkKX+06LEH90zDAMCACH5BAkAAP8A" +
"LAAAAAAQABAAAAjWAP8JFEihBpIqLUAMXPhPyIw6FDI94ZCHxAOGV+KAmYGihieK" +
"IDKQGLhhAxoPcnpQmHhjCR48KQS6wGLCAilATGq0yPDgQYiYLjDY+LMB0cJKQ2Sl" +
"2HPkwggMLkww/IdjhQY6lgT5uZMAy1QNr/p82OFvQQUpCabaIfKmCYsFECZdkDL1" +
"AwMWE4z8QPBPzZ2pkUAFUvBCIASBUGz02TElx6gxElAJ5PsvSRIhOViwMaJADKZN" +
"CyH408FKwWAxVPRkYahFiSgiPnw0UD11oA4eDcoEOcEwIAAh+QQJAAD/ACwAAAAA" +
"EAAQAAAIyAD/CRw4zAYtYdwGKvyXz1m2YAaF1XsGbWE1aW3UQKQVb8CBA8YG3qMH" +
"Z4E8c++OKQMwD9k4XALTpClp7tsxD8oGIOs2DaamRDPbOFMIwBi6ZARwuQMKp83C" +
"fwSiVUEqgKnThdTU9YrWq5nVp73OMWNmzWvQp6tWlUv7j6m4AQpt2Utny1a4tmnc" +
"AQhAaVuAAAUABxaoCdwzfq70UaJEzpUrSo8HQjNQqNC+WLEKZda8cNYsIoWuFTLA" +
"bhaxpwK1EWOADV+/hQEBACH5BAUAAP8ALAAAAAAQABAAAAjcAP8JFGgj1YYrYGYM" +
"XPivggMMLkxYANODAsN//pLEwFBkA6ESnyiEQTLQ3wJ/IzD8MRVHzikaW7aoEKhj" +
"gR8HCVRZQCQEBZInKtz804HAnxqIC5l04UDmBgkEEPxdkHKRQ6MbINYgODkiwcUM" +
"JJZwybA1iQMbF5eEOBSFhJYFMRR5ZYgDR4gQQ5T8QHDJzpSFR7zsWUH4HyArCiaw" +
"mNJkxwcnRGTQOSLwFx8FY0RMmMCDRRMzbxYtGlim1QsfCmAYEcEmQo6/C4N0CMKn" +
"wWkJMB5dFHjiRJbZVBgGBAAh+QQFAAD/ACwAAAAAEAAQAAAIuQD/CRToT9AFDAk2" +
"DFz47weCBf5iOEgAy8IVhgge+qvgAEoRExY8lBiYESLHBEU2hCzRg4lAjSexgBTZ" +
"AwWFf1pMTlxYQoghGmE4KYm4kyGKGki6VCGKkuE/GiqicpAAJAKDD05btCDjplMD" +
"BVUZOIUEoiyJMy8kiLC68MGQB0syPPh3poHaHAycyNDwJcSDKBkEnuhgd21eGV6+" +
"4HgwaGApwlStfti7IgXDwYV5XJVxxKlAwmA1MwwIACH5BAUAAP8ALAAAAAAQABAA" +
"AAjVAP8JFPhjgT9BF1wMXPgPQUMIC/yMwOBiA0OBEP5NqnAHgw0sG64M1JLxn5oL" +
"UhK4+GMCTRwhApX4S/LvTkosJjZY8ABmxj8donQkgcIQESk5M+pQ4EGElRAbFwH1" +
"QEGhRgMfCnL0ucikaiYkZXwEYrHjYo0nnp5UCdJAAZspF1twmNvihB4xRnIwrJTh" +
"Rp48IP5loaJgVKQPdjTgGPJgCQgSA++OAcWAyKsVsh7gyfBgYBZMEsa+6aMhRQg8" +
"kBduQqVgQpMPdPakSHFR4AsjZC0dYRgQACH5BAUAAP8ALAAAAAAQABAAAAjRAP8J" +
"FFgBAQIdP2wMXPhPCYSHECG0GcaQBi9/GDNirNBuIA8xkjSNuEDywoiTuzYIpKLg" +
"0RxFCWLGhAIlgaN/D4womNCE0UJHJSzkMpHphQIjwFgw/KcrFBo0hCTUghHBy1JG" +
"dTx4CNVAAVUiS9cxmTGjRxlfO80spUHDEAoUE17AmLDDysIuYeYwqnHpH8sJDG5p" +
"CFGJSwsVT7pIEtg1ghkiGnCEWHODDIctWwbOreslxaEHUW40usEwAgMrGlIMipKB" +
"y5qlAp1o+DKExBeGAQEAIfkEBQAA/wAsAAAAABAAEAAACNcA/wkUqETLPwQIByr8" +
"xwPQjwUQD+rAsBATHysIYiQZCNHfwBWtFCi4pMjBiH9q/PhLkkBghxdjJtj5Z+Of" +
"FAwmYzj4xyaIDxEsmih0kQCDUSzw+CiYsGPhP1V/irhIlaXBvwmWnFowtcHElTIv" +
"jPD44FRIHEIWrvgESnahEDklwMygIoFNEydHFFJAcepTjzr/YEQwQ8QLjgfeOHRB" +
"QoMCBYFAcryRsQdHCBKNODzZEobDwCmL6KwIcWjJDTIqtiBZaObI6ChcQNxw48ap" +
"wBVDSGRYs2ZhQAA7";
}
Here is sample code showing how it works. Unlike other image formats, an animated GIF has a set of ImageData objects (like frames in an animation). You render this image data on a Canvas with the delay you want:
http://www.java2s.com/Code/Java/SWT-JFace-Eclipse/DisplayananimatedGIF.htm
This is also a working example, based on the Eclipse article about SWT. I was trying to make a slideshow / animated GIF dialog from images located in a folder; I posted it here because it could be useful for someone.
public class GifDialog extends Dialog {
Shell dialog;
private Canvas canvas;
int numberImage = 1;
private volatile boolean running = true;
final List<Image> imageCollection = new ArrayList<Image>();
GifDialog(Shell parent) {
super(parent);
}
public String open() {
Shell parent = getParent();
this.func();
dialog = new Shell(parent, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL);
dialog.setSize(600, 400);
dialog.setText("Show Begins!!!");
Monitor primary = dialog.getDisplay().getPrimaryMonitor();
Rectangle bounds = primary.getBounds();
Rectangle rect = dialog.getBounds();
int x = bounds.x + (bounds.width - rect.width) / 2;
int y = bounds.y + (bounds.height - rect.height) / 2;
dialog.setLocation(x, y);
dialog.setLayout(new FillLayout());
final Canvas canvas = new Canvas(dialog, SWT.NONE);
final Image image = new Image(dialog.getDisplay(), imageCollection.get(
0).getImageData());
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener() {
public void paintControl(PaintEvent event) {
event.gc.drawImage(image, 0, 0);
}
});
Thread thread = new Thread() {
public void run() {
while (running) {
dialog.getDisplay().asyncExec(new Runnable() {
public void run() {
numberImage = numberImage == imageCollection.size() - 1
? 0 : numberImage + 1;
ImageData nextFrameData = imageCollection.get(
numberImage).getImageData();
Image frameImage = new Image(dialog.getDisplay(),
nextFrameData);
gc.drawImage(frameImage, nextFrameData.x,
nextFrameData.y);
frameImage.dispose();
canvas.redraw();
if (numberImage == 0)
try {
running = false;
} catch (Exception e) {
e.printStackTrace();
}
try {
Thread.sleep(200);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
}
}
};
dialog.open();
thread.start();
Display display = parent.getDisplay();
while (!dialog.isDisposed()) {
if (!display.readAndDispatch())
display.sleep();
}
return "After Dialog";
}
public void func() {
File path = new File("..\\folder"); // backslash must be escaped; "\f" is a form-feed escape
File[] files = path.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].isFile()) { // this line weeds out other
// directories/folders
try {
ImageData imageData = new ImageData(
new ByteArrayInputStream(loadImage(files[i])));
final Image image = new Image(Display.getDefault(),
imageData);
imageCollection.add(image);
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
public byte[] loadImage(File file) throws IOException {
BufferedImage image = ImageIO.read(file);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ImageIO.write(image, "jpg", bos);
return bos.toByteArray();
}
public Canvas getCanvas() {
return canvas;
}
public void setCanvas(Canvas canvas) {
this.canvas = canvas;
}
Note: none of the examples that rely on SWT's ImageLoader will work on GTK Linux SWT, as there is currently a bug that hard-codes the maximum number of frames to 32 and sets the delay between frames incorrectly.
See GTK ImageLoader.java
// Fix the number of GIF frames as GdkPixbufAnimation does not provide an API to
// determine number of frames.
int num_frames = 32;
