How to record continuously with Android's AudioRecord? - java

I'm trying to record with AudioRecord. The problem, I think, is that the read method is called again too quickly, so the audioBuffer ends up with overlapped data. How can I process the buffer as soon as it is full, and do that continuously?
Thank you for every answer.
public class Record {
/**
* TAG for "Android monitor" while debugging
*/
private static final String LOG_TAG = Record.class.getSimpleName();
/**
* calling activity needed for callback functions.
*/
private MainActivity mActivity;
// ???
private final int SAMPLE_RATE = 48000; // Samples per s
private final int BAUD_RATE = 8000; // Symbols per s
private final double T_SYMBOL = 1/(double) BAUD_RATE; // Symbol period in s
private final int NUM_SAMPLES = (int) (T_SYMBOL * SAMPLE_RATE); // Number of samples per symbol
private final int MAX_FRAMES = 3; // Maximum possible number of frames receiving in one sequence
private final int SYMBOLS_PER_FRAME = 10 * 2; // 10 from Uart and 2 because Manchester
private final int MIN_BUFFER_SIZE = MAX_FRAMES * SYMBOLS_PER_FRAME * NUM_SAMPLES;
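// With these values: T_SYMBOL = 1/8000 s, so NUM_SAMPLES = 48000/8000 = 6 samples
// per symbol, and MIN_BUFFER_SIZE = 3 * 20 * 6 = 360 samples.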
private final short POS_THRESHOLD = 25000;
private final short NEG_THRESHOLD = -POS_THRESHOLD;
// ???
/**
* Record handling
*
* @param activity the calling activity
*/
public Record(MainActivity activity) {
mActivity = activity;
}
// ========================================================================================== //
/**
* Record the sensor data from the MIC Channel.
*/
public void recordData(){
new Thread(new Runnable() {
@Override
public void run() {
Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
// Buffer size in bytes
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize == AudioRecord.ERROR ||
bufferSize == AudioRecord.ERROR_BAD_VALUE ||
bufferSize < MIN_BUFFER_SIZE) {
bufferSize = MIN_BUFFER_SIZE;
}
short[] audioBuffer = new short[bufferSize / 2];
AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
if (record.getState() != AudioRecord.STATE_INITIALIZED) {
Log.e(LOG_TAG, mActivity.getString(R.string.log_ar_not_init));
return;
}
record.startRecording();
Log.i(LOG_TAG, mActivity.getString(R.string.log_start_recording));
int firstSample = 0;
boolean firstSampleFound = false;
final short[] sample = new short[MIN_BUFFER_SIZE];
short[] tempBuffer = new short[2 * bufferSize];
while (mActivity.getSupplySensor()) {
record.read(audioBuffer, 0, audioBuffer.length);
// Check if data are in the audio buffer (find first sample).
// When yes, copy the audio buffer in the first part of the temp buffer.
// Read the audio Buffer again.
// Copy the audio buffer in the second part of the temp buffer.
if(!firstSampleFound) {
for (int i = 0; i < audioBuffer.length; i++) {
if ((audioBuffer[i] > POS_THRESHOLD) && !firstSampleFound) {
//|| audioBuffer[i] < NEG_THRESHOLD
firstSample = i;
firstSampleFound = true;
arraycopy(audioBuffer, 0, tempBuffer, 0, audioBuffer.length);
}
}
} else {
arraycopy(audioBuffer, 0, tempBuffer, audioBuffer.length, audioBuffer.length);
arraycopy(tempBuffer, firstSample, sample, 0, MIN_BUFFER_SIZE);
firstSampleFound = false;
firstSample = 0;
// Begin: Used for debugging
for (int i = 0; i < sample.length; i++) {
String el = Integer.toString(i);
String elVal = Integer.toString(sample[i]);
Log.e("sample", "not zero " + el + " " + elVal);
}
// Clear audio buffer
for(int i = 0; i < audioBuffer.length; i++){
audioBuffer[i] = 0;
}
// End: Used for debugging
if(mActivity.getSupplySensor()) {
mActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
mActivity.processData(sample);
}
});
}
}
}
record.stop();
record.release();
Log.i(LOG_TAG, mActivity.getString(R.string.log_stop_recording));
/*
// Begin: Used for debugging
for (int i = 0; i < sample.length; i++) {
String el = Integer.toString(i);
String elVal = Integer.toString(sample[i]);
Log.e("sample", "not zero " + el + " " + elVal);
}
// End: Used for debugging
*/
}
}).start();
}
}
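For reference: AudioRecord.read() returns the number of shorts actually read, which can be less than the requested length, so a loop that assumes each call fills the whole buffer can end up mixing old and new data. A minimal sketch of a read loop that only hands off completely filled buffers; isRecording and onBufferFull are hypothetical stand-ins for mActivity.getSupplySensor() and processData():
int offset = 0;
while (isRecording) {
    int read = record.read(audioBuffer, offset, audioBuffer.length - offset);
    if (read < 0) break; // ERROR, ERROR_BAD_VALUE or ERROR_INVALID_OPERATION
    offset += read;
    if (offset == audioBuffer.length) {
        onBufferFull(audioBuffer); // process exactly one full, non-overlapping buffer
        offset = 0;                // start filling the next buffer
    }
}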

Related

Audio merging using OpenSL ES Android

I am trying to record my vocals and then merge them with an audio file using the OpenSL ES library. I found this GitHub sample called Native-Audio. It merges the two audio tracks, but the background audio file plays much faster than its actual rate in the final output.
Please use headphones to notice the difference.
Sample links: Before and After
Also, it only uses files from the assets folder. How can I manually select MP3 files from the file manager?
private void mixAudio(){
try {
if (!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) ||
!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED))
{
// Show rationale and request permission.
ActivityCompat.requestPermissions(this,
new String[]{android.Manifest.permission.READ_EXTERNAL_STORAGE, android.Manifest.permission.WRITE_EXTERNAL_STORAGE},
1000);
}
else {
buttonMix.setEnabled(false);
buttonMix.setText("MIXING....");
textViewMixPath.setText("");
buttonPlay.setEnabled(false);
buttonRecord.setEnabled(false);
buttonStart.setEnabled(false);
listView.setEnabled(false);
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try{
//final File file = new File(getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + "mix.wav");
//String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
//File file = new File(baseDir + "/mix.wav");
String path = Environment.getExternalStorageDirectory().getPath() + "/VocalRecorder";
File fileParent = new File(path);
if (!fileParent.exists()){
fileParent.mkdir();
}
final File file = new File(fileParent.getPath() + "/mix.wav");
//String author = getApplicationContext().getPackageName() + ".provider";
//Uri videoUri = FileProvider.get(this, author, mediaFile);
//final File file = new File(getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + "mix.wav");
//MediaMuxer muxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
String beat = beats[selectedBeat];
//beat = beat.replace(".wav", ".mp3");
AssetFileDescriptor afd = getAssets().openFd(beat);
MediaCodec codec = null;
//ByteBuffer outputBuffer;
//short[] data; // data for the AudioTrack playback
//int outputBufferIndex = -1;
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
final long duration = Long.parseLong(durationStr);
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
// right now I am pointing to a URI but I have tested that both will
// play the media file using MediaPlayer
int sampleRate = 0;
int numChannels = 0;
int dstIndex = -1;
int numTracks = extractor.getTrackCount(); //This says 1
for (int i = 0; i < numTracks; ++i) { // so this will just run once
MediaFormat format = extractor.getTrackFormat(i); // getting info so it looks good so far
String mime = format.getString(MediaFormat.KEY_MIME); // "audio/mpeg"
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
codec = MediaCodec.createDecoderByType(mime);
codec.configure(format, null, null, 0);
//format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AMR_NB);
//dstIndex = muxer.addTrack(format);
//writer.setFrameRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
//writer.setSamplesPerFrame(format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
//writer.setBitsPerSample(16);
sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
break;
}
}
// Calculate the number of frames required for specified duration
long numFrames = (long)(duration * sampleRate/1000);
// Create a wav file with the name specified as the first argument
WavFile wavFile = WavFile.newWavFile(file, numChannels, numFrames, 16, sampleRate);
if (codec == null) {
throw new IllegalArgumentException("No decoder for file format");
}
//ByteBuffer[] inputBuffers = decoder.getInputBuffers();
//ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
/*
Boolean eosReceived = false;
while (!eosReceived) {
int inIndex = decoder.dequeueInputBuffer(1000);
if (inIndex >= 0) {
ByteBuffer buffer = decoder.getInputBuffer(inIndex);
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// We shouldn't stop the playback at this point, just pass the EOS
// flag to mDecoder, we will get it again from the
// dequeueOutputBuffer
Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
int outIndex = decoder.dequeueOutputBuffer(info, 1000);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
//outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = decoder.getOutputFormat();
Log.d("DecodeActivity", "New format " + format);
//audioTrack.setPlaybackRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer outBuffer = decoder.getOutputBuffer(outIndex);
Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outBuffer);
final byte[] chunk = new byte[info.size];
outBuffer.get(chunk); // Read the buffer all at once
outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
//audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
decoder.releaseOutputBuffer(outIndex, false);
break;
}
// All decoded frames have been rendered, we can stop playing now
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}
}
}
*/
short recordedData[] = recordedData();
int recordMixStartIndex = -1;
//muxer.start();
codec.start();
Boolean sawInputEOS = false;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo infoMux = new MediaCodec.BufferInfo();
int count = 0;
while (!sawInputEOS) {
int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_US);
Log.i(LOG_TAG, "inputBufIndex : " + inputBufIndex);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codec.getInputBuffer(inputBufIndex);
int sampleSize = extractor.readSampleData(dstBuf, 0);
Log.i(LOG_TAG, "sampleSize : " + sampleSize);
long presentationTimeUs = 0;
if (sampleSize < 0) {
Log.i(LOG_TAG, "Saw input end of stream!");
sawInputEOS = true;
sampleSize = 0;
} else {
presentationTimeUs = extractor.getSampleTime();
Log.i(LOG_TAG, "presentationTimeUs " + presentationTimeUs);
}
codec.queueInputBuffer(inputBufIndex,
0, //offset
sampleSize,
presentationTimeUs,
sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
if (!sawInputEOS) {
Log.i(LOG_TAG, "extractor.advance()");
extractor.advance();
}
}
final int res = codec.dequeueOutputBuffer(info, TIMEOUT_US);
if (res >= 0) {
int outputBufIndex = res;
ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
//final byte[] chunk = new byte[info.size];
//buf.get(chunk); // Read the buffer all at once
short[] shortArray = new short[info.size/2];
buf.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shortArray);
buf.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
if (shortArray.length > 0) {
//mAudioTrack.write(chunk, 0, chunk.length);
//infoMux.presentationTimeUs = info.presentationTimeUs;
//infoMux.flags = info.flags;
//muxer.writeSampleData(dstIndex, ByteBuffer.wrap(chunk),
// infoMux);
long []longData = new long[shortArray.length];
// Merge data with vocal
// Calculate the time
final long bufferTimer = info.presentationTimeUs/1000;
int vocalCount = 0;
for (int i = 0; i < shortArray.length; i ++) {
//writer.writeShortLittle(shortArray[i]);
long offsetTime = i*1000/(sampleRate*2); // 2 channels
Boolean mixed = false;
if ((offsetTime + bufferTimer > recordStartTime) && (offsetTime + bufferTimer <= recordStopTime + 500)){
if (recordMixStartIndex == -1){
recordMixStartIndex = 0;
}
if (recordMixStartIndex < recordedData.length){
//Log.i("TAG", "############ mix record data: " + recordMixStartIndex);
longData[i] = TPMixSamples((int)(recordedData[recordMixStartIndex]), (int)shortArray[i]/3);
if (vocalCount >= 3) {
recordMixStartIndex++;
vocalCount = 0;
}
else{
vocalCount ++;
}
mixed = true;
}
}
else {
// All done, set sawInputEOS to stop mixing
if (bufferTimer > recordStopTime + 500){
sawInputEOS = true;
}
}
if (!mixed) {
longData[i] = shortArray[i];
}
}
Log.i("TAG", "############ write frames: " + longData.length/2);
wavFile.writeFrames(longData, longData.length/2);
count ++;
if (count % 5 == 0){
runOnUiThread(new Runnable() {
@Override
public void run() {
long percent = bufferTimer*100/duration;
buttonMix.setText("MIXING..." + percent + "%");
}
});
}
}
codec.releaseOutputBuffer(outputBufIndex, false /* render */);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawInputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
//codecOutputBuffers = codec.getOutputBuffers();
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
final MediaFormat oformat = codec.getOutputFormat();
Log.d(LOG_TAG, "Output format has changed to " + oformat);
//mAudioTrack.setPlaybackRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
}
}
// Close the wavFile
wavFile.close();
// muxer.stop();
// muxer.release();
codec.stop();
codec.release();
extractor.release();
runOnUiThread(new Runnable() {
@Override
public void run() {
buttonMix.setText("MIX DONE");
buttonPlay.setEnabled(true);
buttonRecord.setEnabled(true);
textViewMixPath.setText(file.getPath());
buttonStart.setEnabled(true);
listView.setEnabled(true);
}
});
}
catch (Exception e){
}
}
});
thread.start();
}
}
catch (Exception e){
e.printStackTrace();
}
}
private final int INT16_MIN = - 32768;
private final int INT16_MAX = 32767;
private long TPMixSamples(int a, int b) {
if (a > INT16_MAX) {a = INT16_MAX;}
if (a < INT16_MIN) {a = INT16_MIN;}
return
// If both samples are negative, mixed signal must have an amplitude between the lesser of A and B, and the minimum permissible negative amplitude
a < 0 && b < 0 ?
((int)a + (int)b) - (((int)a * (int)b)/INT16_MIN) :
// If both samples are positive, mixed signal must have an amplitude between the greater of A and B, and the maximum permissible positive amplitude
( a > 0 && b > 0 ?
((int)a + (int)b) - (((int)a * (int)b)/INT16_MAX)
// If samples are on opposite sides of the 0-crossing, mixed signal should reflect that samples cancel each other out somewhat
:
a + b);
}
/** Native methods, implemented in jni folder */
public static native void createEngine();
public static native void createBufferQueueAudioPlayer(int sampleRate, int samplesPerBuf);
/////
public static native boolean createAssetAudioPlayer(AssetManager assetManager, String filename);
// true == PLAYING, false == PAUSED
public static native void setPlayingAssetAudioPlayer(boolean isPlaying);
public static native int getDurationAssetAudioPlayer();
public static native int getCurrentPositionAssetAudioPlayer();
//////
public static native boolean createUriAudioPlayer(String uri);
public static native void setPlayingUriAudioPlayer(boolean isPlaying);
public static native void setLoopingUriAudioPlayer(boolean isLooping);
public static native void setChannelMuteUriAudioPlayer(int chan, boolean mute);
public static native void setChannelSoloUriAudioPlayer(int chan, boolean solo);
public static native int getNumChannelsUriAudioPlayer();
public static native void setVolumeUriAudioPlayer(int millibel);
public static native void setMuteUriAudioPlayer(boolean mute);
public static native void enableStereoPositionUriAudioPlayer(boolean enable);
public static native void setStereoPositionUriAudioPlayer(int permille);
public static native boolean selectClip(int which, int count);
public static native void stopClip();
public static native boolean enableReverb(boolean enabled);
public static native boolean createAudioRecorder();
public static native void startRecording();
public static native void stopRecording();
public static native void pauseRecording();
public static native void resumeRecording();
public static native short[] recordedData();
public static native double recordedDuration();
public static native void shutdown();
/** Load jni .so on initialization */
static {
System.loadLibrary("native-audio-jni");
}
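Regarding the second question, selecting MP3 files from the file manager instead of the assets folder: one common approach is the Storage Access Framework. A sketch only, not part of the sample above (REQUEST_AUDIO_PICK is a hypothetical request code, and exception handling is omitted):
// Let the user pick an audio file.
Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT);
intent.addCategory(Intent.CATEGORY_OPENABLE);
intent.setType("audio/mpeg");
startActivityForResult(intent, REQUEST_AUDIO_PICK);

// Later, in onActivityResult(), hand the returned Uri to MediaExtractor:
Uri uri = data.getData();
ParcelFileDescriptor pfd = getContentResolver().openFileDescriptor(uri, "r");
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(pfd.getFileDescriptor());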

Android Camera2: acquire frames from camera preview

I'm a beginner in Android and I have to create an app that charts, in real time, the camera images processed in RGB, using Camera2.
I've created an activity where I chart the data and a fragment for Camera2 based on the Google example. The camera part works, but I have problems with ImageReader when processing the frames. Inside openCamera, I've set up a listener for the ImageReader and tried to create a background thread for acquiring the frame data.
private void openCamera(int width, int height) {
// another part of code
ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Thread mythread = new Thread(new Runnable() {
@Override
public void run(){
readImage = mImageReader.acquireLatestImage();
if (readImage == null){
Log.e(TAG, "ERROR");
}
float[] val = caricadato(readImage);
try {
Thread.sleep(100); //600
} catch (InterruptedException e) {
//manage error...
}
}
});
mythread.start();}
};
mImageReader = ImageReader.newInstance(176, 144,
35 /* ImageFormat.YUV_420_888 */, 1 /* maxImages */);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
//....continue
}
The "carica dato" method extracts the green channel and makes it the mean (data to be plotted).
private float[] caricadato(Image image){
Log.i(TAG, "The onImageAvailable thread id: " + Thread.currentThread().getId());
float[] v = yuv2rgb(image);
float media = valore_medio(v);
dati_realtime[count]= media;
Log.i(TAG,"dato media"+ media);
Log.i(TAG,"dato real time "+ dati_realtime[count]);
Log.i(TAG,"VALORE CONTATORE "+count);
count++;
image.close();
return dati_realtime;
}
private float[] yuv2rgb(Image image){
ByteBuffer buffer0 = image.getPlanes()[0].getBuffer();
byte[] Y1 = new byte[buffer0.remaining()];
buffer0.get(Y1);
ByteBuffer buffer1 = image.getPlanes()[1].getBuffer();
byte[] U1 = new byte[buffer1.remaining()];
buffer1.get(U1);
ByteBuffer buffer2 = image.getPlanes()[2].getBuffer();
byte[] V1 = new byte[buffer2.remaining()];
buffer2.get(V1);
int Width = image.getWidth();
int Height = image.getHeight();
float[]verdi = new float[image.getHeight()*image.getWidth()];
int contatore = 0;
for(int i = 0; i<Height-1; i++){
for (int j = 0; j<Width; j++){
int Y = Y1[i*Width+j]&0xFF;
int U = U1[(i/2)*(Width/2)+j/2]&0xFF;
int V = V1[(i/2)*(Width/2)+j/2]&0xFF;
U = U-128;
V = V-128;
float R,G,B;
R = (float)(Y + 1.402 * V);
G = (float)(Y - 0.34414 * U - 0.71414 * V);
B = (float) (Y + 1.772 * U);
verdi[contatore] = G; // store the float directly; a (byte) cast would wrap values above 127
contatore++;
}
}
return verdi;
}
The error at start and resume is often:
E/AndroidRuntime: FATAL EXCEPTION: Thread-5 Process: com.example.francesca.beatandgo, PID: 22610
java.lang.IllegalStateException: maxImages (1) has already been acquired, call #close before acquiring more.
at android.media.ImageReader.acquireNextImage(ImageReader.java:398)
at android.media.ImageReader.acquireLatestImage(ImageReader.java:283)
at com.example.francesca.beatandgo.camera.Camera2VideoFragment$3$1.run(Camera2VideoFragment.java:506)
at java.lang.Thread.run(Thread.java:776)
Camera2VideoFragment.java:506 is readImage = mImageReader.acquireLatestImage(); in openCamera.
How can I resolve this problem? And how can I pass the data back to the main thread in the Activity?
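For what it's worth, the IllegalStateException means a second Image was acquired while the first one from a maxImages == 1 reader was still open. A sketch of a listener that acquires, processes, and closes each frame directly on the background handler and then posts the result to the UI thread (plotOnUi is a hypothetical callback on the activity):
ImageReader.OnImageAvailableListener listener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // This already runs on mBackgroundHandler, so no extra Thread per frame is needed.
        Image image = reader.acquireLatestImage();
        if (image == null) return;
        final float[] val = caricadato(image); // caricadato() closes the image itself
        getActivity().runOnUiThread(new Runnable() { // hand the data to the main thread
            @Override
            public void run() { plotOnUi(val); } // hypothetical UI callback
        });
    }
};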

Android long text pagination

I need to display large text files without scrolling, like an e-book, by breaking the long text into pages, but it takes too much time. For example, the following code takes about 10-15 seconds to process 1.4 MB of text.
public void split(TextPaint textPaint, String filepath,Context context) {
int pages = 0;
File file = new File(filepath);
char[] bufferChar = new char[1024];
String uncompletedtext="";
// How many lines we can show
int maxLinesOnpage = 0;
StaticLayout staticLayout = new StaticLayout(
context.getString(R.string.lorem_ipsum),
textPaint,
pageWidth,
Layout.Alignment.ALIGN_NORMAL,
lineSpacingMultiplier,
lineSpacingExtra,
false
);
int startLineTop = staticLayout.getLineTop(0);
int endLine = staticLayout.getLineForVertical(startLineTop + pageHeight);
int endLineBottom = staticLayout.getLineBottom(endLine);
if (endLineBottom > startLineTop + pageHeight) {
maxLinesOnpage = endLine - 1;
} else {
maxLinesOnpage = endLine;
}
//let's paginate
try {
BufferedReader buffer = new BufferedReader(new FileReader(file));
int readChars;
while ((readChars = buffer.read(bufferChar)) >= 0) {
uncompletedtext += new String(bufferChar, 0, readChars); // only append what was actually read
boolean allcomplete = false;
staticLayout = new StaticLayout(
uncompletedtext,
textPaint,
pageWidth,
Layout.Alignment.ALIGN_NORMAL,
lineSpacingMultiplier,
lineSpacingExtra,
false
);
staticLayout.getLineCount();
int curTextPages = staticLayout.getLineCount() / maxLinesOnpage; // integer division already floors
uncompletedtext=uncompletedtext.substring(staticLayout.getLineEnd(curTextPages));
pages+=curTextPages;
Log.e("PAGES","" + pages);
}
} catch (Exception e) {
e.printStackTrace();
}
Log.e("FILE READED FULLY!!", "READ COMPLETE!!!!!!!!!!!!!!!!");
}
That is too long. I cannot understand how applications such as FBReader and CoolReader handle large files (more than 9 MB) instantly.
I looked at the source of those applications, but they have too much functionality to find the answer quickly.
I really need help and tips. Thanks.
Thanks to all! I found a solution! Not elegant, but very fast (10 MB in ~600 ms):
public void split(TextPaint textPaint, String filepath,Context context) {
File file = new File(filepath);
char[] bufferChar = new char[512];
// How many lines fit on a page
int maxLinesOnpage = 0;
int symbolsOnLine = 0;
StaticLayout staticLayout = new StaticLayout(
context.getString(R.string.lorem_ipsum),//short text with 100 lines (\r\n\r\n\r\n\r\n\r\n\r\n)
textPaint, //MONOSPACE!!!
pageWidth,
Layout.Alignment.ALIGN_NORMAL,
lineSpacingMultiplier,
lineSpacingExtra,
false
);
int startLineTop = staticLayout.getLineTop(0);
int endLine = staticLayout.getLineForVertical(startLineTop + pageHeight);
int endLineBottom = staticLayout.getLineBottom(endLine);
if (endLineBottom > startLineTop + pageHeight) {
maxLinesOnpage = endLine - 1;
} else {
maxLinesOnpage = endLine;
}
symbolsOnLine = staticLayout.getLineEnd(0);
try {
RandomAccessFile rac = new RandomAccessFile(file, "r");
byte[] buffer = new byte[2048];
int wordLen = 0; //Length of word in symbols
int wordInBytes = 0; // Length of word in bytes
int startLinePos = 0; //Start first line position
int lineWidth = 0; //Current line length
int totalLines =0; //Total lines on current page
Log.e("Start pagination", "" + totalLines);
long timeout= System.currentTimeMillis();
int buflen=0; //buffer size
int totalReadedBytes = 0; // Total bytes read
byte skipBytes = 0;
while ( (buflen=rac.read(buffer))!=-1){
for (int i=0;i<buflen;i++) {
totalReadedBytes++;
wordInBytes++;
if (skipBytes==0){ //Bytes on one symbol
if (unsignedToBytes(buffer[i])>=192){skipBytes=2;}
if (unsignedToBytes(buffer[i])>=224){skipBytes=3;}
if (unsignedToBytes(buffer[i])>=240){skipBytes=4;}
if (unsignedToBytes(buffer[i])>=248){skipBytes=5;}
if (unsignedToBytes(buffer[i])>=252){skipBytes=6;}
}
//Full bytes on symbol or not
if (skipBytes>0){
skipBytes--;
if (skipBytes>0){continue;}
}
if (buffer[i] == 13) {//We have a \r symbol. Ignore.
continue;
}
if (buffer[i]==10){//New line symbol
if (lineWidth + wordLen>symbolsOnLine){
totalLines++;
if (totalLines > maxLinesOnpage) {
int[] pgsbytes = {startLinePos, totalReadedBytes};
pages.add(pgsbytes);
startLinePos = totalReadedBytes ;
totalLines = 0;
}
}
wordLen=0;
wordInBytes=0;
totalLines++;
lineWidth=0;
if (totalLines>maxLinesOnpage){
int[] pgsbytes = {startLinePos, totalReadedBytes-1};
pages.add(pgsbytes);
startLinePos = totalReadedBytes-1;
totalLines=0;
}
}
if (buffer[i]==32){//Space symbol
if (lineWidth + wordLen+1<=symbolsOnLine){//Word fits in line
lineWidth+=wordLen + 1;
wordLen=0;
if (lineWidth==symbolsOnLine){
totalLines++;
if (totalLines > maxLinesOnpage) {
int[] pgsbytes = {startLinePos, totalReadedBytes};
pages.add(pgsbytes);
startLinePos = totalReadedBytes ;
totalLines = 0;
}
lineWidth = 0;
wordLen = 0;
wordInBytes=0;
}
} else {
if (lineWidth + wordLen==symbolsOnLine){
totalLines++;
if (totalLines > maxLinesOnpage) {
int[] pgsbytes = {startLinePos, totalReadedBytes};
pages.add(pgsbytes);
startLinePos = totalReadedBytes ;
totalLines = 0;
}
lineWidth = 0;
wordLen = 0;
wordInBytes=0;
} else {
totalLines++;
if (totalLines > maxLinesOnpage) {
int[] pgsbytes = {startLinePos, totalReadedBytes - 1 - wordInBytes};
pages.add(pgsbytes);
startLinePos = totalReadedBytes - 1;
totalLines = 0;
}
lineWidth = wordLen + 1;
wordLen = 0;
wordInBytes=0;
}
}
}
if (buffer[i]!=32&&buffer[i]!=10&&buffer[i]!=13){wordLen++; }
if (wordLen==symbolsOnLine){
totalLines++;
if (totalLines>maxLinesOnpage){
int[] pgsbytes = {startLinePos, totalReadedBytes-1 - wordInBytes};
pages.add(pgsbytes);
startLinePos = totalReadedBytes-1;
totalLines=0;
}
lineWidth=0;
wordLen=0;
wordInBytes=0;
}
}
}
rac.close();
timeout = System.currentTimeMillis() - timeout;
Log.e("TOTAL Time", " time " + timeout + "ms");
} catch (Exception e) {
e.printStackTrace();
}
Log.e("FILE READED FULLY!!", "READ COMPLETE!!!!!!!!!!!!!!!!");
}
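The snippet above references an unsignedToBytes helper and a pages list that are not shown; under the obvious assumption, the missing declarations would look like this:
// Assumed declarations, not shown in the original post:
private final List<int[]> pages = new ArrayList<>(); // {startBytePos, endBytePos} per page
private static int unsignedToBytes(byte b) {
    return b & 0xFF; // treat the byte as an unsigned value 0..255
}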

Exception in thread "main" java.lang.RuntimeException: Stub , viewed all thread but of no use

This is a face recognition class which reads the faces from the data folder, but when I run this code I get the error given below. None of the related threads could solve my problem. Any help will be appreciated.
public class FaceRecognition {
/** the logger */
private static final Logger LOGGER = Logger.getLogger(FaceRecognition.class);
//JavaLoggingClassName.loginfo();
/** the number of training faces */
private int nTrainFaces = 0;
private int width = 320;
private int height = 240;
/** the training face image array */
IplImage[] trainingFaceImgArr = null;//IplImage.create(width, height, IPL_DEPTH_8U, 4);
/** the test face image array */
IplImage[] testFaceImgArr= null;
//IplImage image = IplImage.create(width, height, IPL_DEPTH_8U, 4);
// Bitmap mBitmap;
//IplImage image = IplImage.create(width, height, IPL_DEPTH_8U, 4);
//private Bitmap mBitmap;
/** the person number array **/
CvMat personNumTruthMat;
/** the number of persons **/
int nPersons;
/** the person names */
final List personNames = new ArrayList<>();
/** the number of eigenvalues */
int nEigens = 0;
/** eigenvectors */
IplImage[] eigenVectArr;
/** eigenvalues */
CvMat eigenValMat;
/** the average image */
IplImage pAvgTrainImg;
/** the projected training faces */
CvMat projectedTrainFaceMat;
/** Constructs a new FaceRecognition instance. */
public FaceRecognition() {
}
//JavaLoggingClassName.loginfo()
/** Trains from the data in the given training text index file, and stores the trained data into the file 'data/facedata.xml'.
*
* @param trainingFileName the given training text index file
*/
public void learn(final String trainingFileName) {
int i;
// load training data
LOGGER.info("===========================================");
//IplImage
LOGGER.info("Loading the training images in " + trainingFileName);
//IplImage image = IplImage.create(width, height, IPL_DEPTH_8U, 4);
//mBitmap.copyPixelsFromBuffer(image.getByteBuffer());
try
{
trainingFaceImgArr /*mBitmap*/ = loadFaceImgArray(trainingFileName);
} catch (Exception e)
{
Log.i("ERROR", "ERROR in Code: " + e.toString());
e.printStackTrace();
}
nTrainFaces = trainingFaceImgArr.length;
LOGGER.info("Got " + nTrainFaces + " training images");
if (nTrainFaces < 3) {
LOGGER.error("Need 3 or more training faces\n"
+ "Input file contains only " + nTrainFaces);
return;
}
LOGGER.info("created projectedTrainFaceMat with " + nTrainFaces + " (nTrainFaces) rows and " + nEigens + " (nEigens) columns");
if (nTrainFaces < 5) {
LOGGER.info("projectedTrainFaceMat contents:\n" + oneChannelCvMatToString(projectedTrainFaceMat));
}
/* @param szFileTest the index file of test images
} catch (IOException ex) {
throw new RuntimeException(ex);
}
LOGGER.info("Data loaded from '" + filename + "': (" + nFaces + " images of " + nPersons + " people).");
final StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("People: ");
if (nPersons > 0) {
stringBuilder.append("<").append(personNames.get(0)).append(">");
}
for (i = 1; i < nPersons && i < personNames.size(); i++) {
stringBuilder.append(", <").append(personNames.get(i)).append(">");
}
LOGGER.info(stringBuilder.toString());
return faceImgArr;
}
/** Does the Principal Component Analysis, finding the average image and the eigenfaces that represent any image in the given dataset. */
private void doPCA() {
int i;
CvTermCriteria calcLimit;
CvSize faceImgSize = new CvSize();
// set the number of eigenvalues to use
nEigens = nTrainFaces - 1;
LOGGER.info("allocating images for principal component analysis, using " + nEigens + (nEigens == 1 ? " eigenvalue" : " eigenvalues"));
// allocate the eigenvector images
faceImgSize.width(trainingFaceImgArr[0].width());
faceImgSize.height(trainingFaceImgArr[0].height());
eigenVectArr = new IplImage[nEigens];
for (i = 0; i < nEigens; i++) {
eigenVectArr[i] = cvCreateImage(
faceImgSize, // size
IPL_DEPTH_32F, // depth
1); // channels
}
// allocate the eigenvalue array
eigenValMat = cvCreateMat(
1, // rows
nEigens, // cols
CV_32FC1); // type, 32-bit float, 1 channel
// allocate the averaged image
pAvgTrainImg = cvCreateImage(
faceImgSize, // size
IPL_DEPTH_32F, // depth
1); // channels
// set the PCA termination criterion
calcLimit = cvTermCriteria(
CV_TERMCRIT_ITER, // type
nEigens, // max_iter
1); // epsilon
LOGGER.info("computing average image, eigenvalues and eigenvectors");
// compute average image, eigenvalues, and eigenvectors
cvCalcEigenObjects(
nTrainFaces, // nObjects
new PointerPointer(trainingFaceImgArr), // input
new PointerPointer(eigenVectArr), // output
CV_EIGOBJ_NO_CALLBACK, // ioFlags
0, // ioBufSize
null, // userData
calcLimit,
pAvgTrainImg, // avg
eigenValMat.data_fl()); // eigVals
LOGGER.info("normalizing the eigenvectors");
cvNormalize(
eigenValMat, // src (CvArr)
eigenValMat, // dst (CvArr)
1, // a
0, // b
CV_L1, // norm_type
null); // mask
}
/** Stores the training data to the file 'data/facedata.xml'. */
private void storeTrainingData() {
CvFileStorage fileStorage;
int i;
LOGGER.info("writing data/facedata.xml");
// create a file-storage interface
fileStorage = cvOpenFileStorage(
"data/facedata.xml", // filename
null, // memstorage
CV_STORAGE_WRITE, // flags
null); // encoding
// Store the person names. Added by Shervin.
cvWriteInt(
fileStorage, // fs
"nPersons", // name
nPersons); // value
for (i = 0; i < nPersons; i++) {
String varname = "personName_" + (i + 1);
String personame=(String)personNames.get(i);
cvWriteString(
fileStorage, // fs
varname, // name
personame, // string
0); // quote
}
// store all the data
cvWriteInt(
fileStorage, // fs
"nEigens", // name
nEigens); // value
cvWriteInt(
fileStorage, // fs
"nTrainFaces", // name
nTrainFaces); // value
cvWrite(
fileStorage, // fs
"trainPersonNumMat", // name
personNumTruthMat, // value
cvAttrList()); // attributes
cvWrite(
fileStorage, // fs
"eigenValMat", // name
eigenValMat, // value
cvAttrList()); // attributes
cvWrite(
fileStorage, // fs
"projectedTrainFaceMat", // name
projectedTrainFaceMat,
cvAttrList()); // value
cvWrite(fileStorage, // fs
"avgTrainImg", // name
pAvgTrainImg, // value
cvAttrList()); // attributes
for (i = 0; i < nEigens; i++) {
String varname = "eigenVect_" + i;
cvWrite(
fileStorage, // fs
varname, // name
eigenVectArr[i], // value
cvAttrList()); // attributes
}
// release the file-storage interface
cvReleaseFileStorage(fileStorage);
}
/** Opens the training data from the file 'data/facedata.xml'.
*
* @return the person numbers during training, or null if not successful
*/
private CvMat loadTrainingData() {
LOGGER.info("loading training data");
CvMat pTrainPersonNumMat = null; // the person numbers during training
CvFileStorage fileStorage;
int i;
// create a file-storage interface
fileStorage = cvOpenFileStorage(
"data/facedata.xml", // filename
null, // memstorage
CV_STORAGE_READ, // flags
null); // encoding
if (fileStorage == null) {
LOGGER.error("Can't open training database file 'data/facedata.xml'.");
return null;
}
// Load the person names.
personNames.clear(); // Make sure it starts as empty.
nPersons = cvReadIntByName(
fileStorage, // fs
null, // map
"nPersons", // name
0); // default_value
if (nPersons == 0) {
LOGGER.error("No people found in the training database 'data/facedata.xml'.");
return null;
} else {
LOGGER.info(nPersons + " persons read from the training database");
}
// Load each person's name.
for (i = 0; i < nPersons; i++) {
String sPersonName;
String varname = "personName_" + (i + 1);
sPersonName = cvReadStringByName(
fileStorage, // fs
null, // map
varname,
"");
personNames.add(sPersonName);
}
LOGGER.info("person names: " + personNames);
// Load the data
nEigens = cvReadIntByName(
fileStorage, // fs
null, // map
"nEigens",
0); // default_value
nTrainFaces = cvReadIntByName(
fileStorage,
null, // map
"nTrainFaces",
0); // default_value
Pointer pointer = cvReadByName(
fileStorage, // fs
null, // map
"trainPersonNumMat", // name
cvAttrList()); // attributes
pTrainPersonNumMat = new CvMat(pointer);
pointer = cvReadByName(
fileStorage, // fs
null, // map
"eigenValMat", // nmae
cvAttrList()); // attributes
eigenValMat = new CvMat(pointer);
pointer = cvReadByName(
fileStorage, // fs
null, // map
"projectedTrainFaceMat", // name
cvAttrList()); // attributes
projectedTrainFaceMat = new CvMat(pointer);
pointer = cvReadByName(
fileStorage,
null, // map
"avgTrainImg",
cvAttrList()); // attributes
pAvgTrainImg = new IplImage(pointer);
eigenVectArr = new IplImage[nTrainFaces];
for (i = 0; i < nEigens; i++) {
String varname = "eigenVect_" + i;
pointer = cvReadByName(
fileStorage,
null, // map
varname,
cvAttrList()); // attributes
eigenVectArr[i] = new IplImage(pointer);
}
// release the file-storage interface
cvReleaseFileStorage(fileStorage);
LOGGER.info("Training data loaded (" + nTrainFaces + " training images of " + nPersons + " people)");
final StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("People: ");
if (nPersons > 0) {
stringBuilder.append("<").append(personNames.get(0)).append(">");
}
for (i = 1; i < nPersons; i++) {
stringBuilder.append(", <").append(personNames.get(i)).append(">");
}
LOGGER.info(stringBuilder.toString());
return pTrainPersonNumMat;
}
/** Saves all the eigenvectors as images, so that they can be checked. */
private void storeEigenfaceImages() {
// Store the average image to a file
LOGGER.info("Saving the image of the average face as 'data/out_averageImage.bmp'");
cvSaveImage("data/out_averageImage.bmp", pAvgTrainImg);
// Create a large image made of many eigenface images.
// Must also convert each eigenface image to a normal 8-bit UCHAR image instead of a 32-bit float image.
LOGGER.info("Saving the " + nEigens + " eigenvector images as 'data/out_eigenfaces.bmp'");
if (nEigens > 0) {
// Put all the eigenfaces next to each other.
int COLUMNS = 8; // Put up to 8 images on a row.
int nCols = Math.min(nEigens, COLUMNS);
int nRows = 1 + (nEigens / COLUMNS); // Put the rest on new rows.
int w = eigenVectArr[0].width();
int h = eigenVectArr[0].height();
CvSize size = cvSize(nCols * w, nRows * h);
final IplImage bigImg = cvCreateImage(
size,
IPL_DEPTH_8U, // depth, 8-bit Greyscale UCHAR image
1); // channels
for (int i = 0; i < nEigens; i++) {
// Get the eigenface image.
IplImage byteImg = convertFloatImageToUcharImage(eigenVectArr[i]);
// Paste it into the correct position.
int x = w * (i % COLUMNS);
int y = h * (i / COLUMNS);
CvRect ROI = cvRect(x, y, w, h);
cvSetImageROI(
bigImg, // image
ROI); // rect
cvCopy(
byteImg, // src
bigImg, // dst
null); // mask
cvResetImageROI(bigImg);
cvReleaseImage(byteImg);
}
cvSaveImage(
"data/out_eigenfaces.bmp", // filename
bigImg); // image
cvReleaseImage(bigImg);
}
}
/** Converts the given float image to an unsigned character image.
*
* @param srcImg the given float image
* @return the unsigned character image
*/
private IplImage convertFloatImageToUcharImage(IplImage srcImg) {
IplImage dstImg;
if ((srcImg != null) && (srcImg.width() > 0 && srcImg.height() > 0)) {
// Spread the 32bit floating point pixels to fit within 8bit pixel range.
CvPoint minloc = new CvPoint();
CvPoint maxloc = new CvPoint();
double[] minVal = new double[1];
double[] maxVal = new double[1];
cvMinMaxLoc(srcImg, minVal, maxVal, minloc, maxloc, null);
// Deal with NaN and extreme values, since the DFT seems to give some NaN results.
if (minVal[0] < -1e30) {
minVal[0] = -1e30;
}
if (maxVal[0] > 1e30) {
maxVal[0] = 1e30;
}
if (maxVal[0] - minVal[0] == 0.0f) {
maxVal[0] = minVal[0] + 0.001; // remove potential divide by zero errors.
} // Convert the format
dstImg = cvCreateImage(cvSize(srcImg.width(), srcImg.height()), 8, 1);
cvConvertScale(srcImg, dstImg, 255.0 / (maxVal[0] - minVal[0]), -minVal[0] * 255.0 / (maxVal[0] - minVal[0]));
return dstImg;
}
return null;
}
/** Find the most likely person based on a detection. Returns the index, and stores the confidence value into pConfidence.
*
* @param projectedTestFace the projected test face
* @param pConfidencePointer a pointer containing the confidence value
* @return the index of the nearest training face
*/
private int findNearestNeighbor(float projectedTestFace[], FloatPointer pConfidencePointer) {
double leastDistSq = Double.MAX_VALUE;
int i = 0;
int iTrain = 0;
int iNearest = 0;
LOGGER.info("................");
LOGGER.info("find nearest neighbor from " + nTrainFaces + " training faces");
for (iTrain = 0; iTrain < nTrainFaces; iTrain++) {
//LOGGER.info("considering training face " + (iTrain + 1));
double distSq = 0;
for (i = 0; i < nEigens; i++) {
//LOGGER.debug(" projected test face distance from eigenface " + (i + 1) + " is " + projectedTestFace[i]);
float projectedTrainFaceDistance = (float) projectedTrainFaceMat.get(iTrain, i);
float d_i = projectedTestFace[i] - projectedTrainFaceDistance;
distSq += d_i * d_i; // / eigenValMat.data_fl().get(i); // Mahalanobis distance (might give better results than Euclidean distance)
// if (iTrain < 5) {
// LOGGER.info(" ** projected training face " + (iTrain + 1) + " distance from eigenface " + (i + 1) + " is " + projectedTrainFaceDistance);
// LOGGER.info(" distance between them " + d_i);
// LOGGER.info(" distance squared " + distSq);
// }
}
if (distSq < leastDistSq) {
leastDistSq = distSq;
iNearest = iTrain;
LOGGER.info(" training face " + (iTrain + 1) + " is the new best match, least squared distance: " + leastDistSq);
}
}
// Return the confidence level based on the Euclidean distance,
// so that similar images should give a confidence between 0.5 to 1.0,
// and very different images should give a confidence between 0.0 to 0.5.
float pConfidence = (float) (1.0f - Math.sqrt(leastDistSq / (float) (nTrainFaces * nEigens)) / 255.0f);
pConfidencePointer.put(pConfidence);
LOGGER.info("training face " + (iNearest + 1) + " is the final best match, confidence " + pConfidence);
return iNearest;
}
/** Returns a string representation of the given float array.
*
* @param floatArray the given float array
* @return a string representation of the given float array
*/
private String floatArrayToString(final float[] floatArray) {
final StringBuilder stringBuilder = new StringBuilder();
boolean isFirst = true;
stringBuilder.append('[');
for (int i = 0; i < floatArray.length; i++) {
if (isFirst) {
isFirst = false;
} else {
stringBuilder.append(", ");
}
stringBuilder.append(floatArray[i]);
}
stringBuilder.append(']');
return stringBuilder.toString();
}
/** Returns a string representation of the given float pointer.
*
* @param floatPointer the given float pointer
* @return a string representation of the given float pointer
*/
private String floatPointerToString(final FloatPointer floatPointer) {
final StringBuilder stringBuilder = new StringBuilder();
boolean isFirst = true;
stringBuilder.append('[');
for (int i = 0; i < floatPointer.capacity(); i++) {
if (isFirst) {
isFirst = false;
} else {
stringBuilder.append(", ");
}
stringBuilder.append(floatPointer.get(i));
}
stringBuilder.append(']');
return stringBuilder.toString();
}
/** Returns a string representation of the given one-channel CvMat object.
*
* @param cvMat the given CvMat object
* @return a string representation of the given CvMat object
*/
public String oneChannelCvMatToString(final CvMat cvMat) {
//Preconditions
if (cvMat.channels() != 1) {
throw new RuntimeException("illegal argument - CvMat must have one channel");
}
final int type = cvMat.type();
StringBuilder s = new StringBuilder("[ ");
for (int i = 0; i < cvMat.rows(); i++) {
for (int j = 0; j < cvMat.cols(); j++) {
if (type == CV_32FC1 || type == CV_32SC1) {
s.append(cvMat.get(i, j));
} else {
throw new RuntimeException("illegal argument - CvMat must have one channel and type of float or signed integer");
}
if (j < cvMat.cols() - 1) {
s.append(", ");
}
}
if (i < cvMat.rows() - 1) {
s.append("\n ");
}
}
s.append(" ]");
return s.toString();
}
/** Executes this application.
*
* @param args the command line arguments
*/
public static void main(final String[] args) {
BasicConfigurator.configure();
// PropertyConfigurator.configure(args[0]);
// if(args[0]!=null)
/*{
System.out.println("null index");
}
else continue;
*/
final FaceRecognition faceRecognition = new FaceRecognition();
// main myMain = new main();
// myMain.FaceRecognition();
//faceRecognition.learn("data/some-training-faces.txt");
// faceRecognition.learn("G:\\android_support\\javacv-examples\\OpenCV2_Cookbook\\data\\all10.txt");
faceRecognition.learn("data/all100.txt");
//faceRecognition.recognizeFileList("data/some-test-faces.txt");
// faceRecognition.recognizeFileList("G:\\android_support\\javacv-examples\\OpenCV2_Cookbook\\data\\lower3.txt");
faceRecognition.recognizeFileList("data/lower3.txt");
}
}
Error
Exception in thread "main" java.lang.RuntimeException: Stub!
at android.util.Log.i(Log.java:9)
at FaceRecognition.learn(FaceRecognition.java:126)
at FaceRecognition.main(FaceRecognition.java:846)
It seems that you are trying to test Android classes on the host computer, perhaps using JUnit. That does not work. There are two options open:
If possible, move the functionality into a plain library so that no Android packages are used. For example, use java.util.logging instead of android.util.Log.
Use Android instrumentation tests (google it).
And if you use Maven: remember that the Android library is <scope>provided</scope>.
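For illustration, the android.util.Log call in learn() could be swapped like this (a sketch of the suggested change, not code from the question):
// Host-testable logging via java.util.logging instead of android.util.Log:
private static final java.util.logging.Logger LOG =
        java.util.logging.Logger.getLogger(FaceRecognition.class.getName());

// instead of: Log.i("ERROR", "ERROR in Code: " + e.toString());
LOG.log(java.util.logging.Level.INFO, "ERROR in Code: " + e.toString(), e);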

JAVA SWT Animated GIF

I need to add an animated GIF, in the form of a spinner, to some SWT widget, for example a Label. This Label will be disabled by default (labelSpinner.setEnabled(false)). When I start some long-running operation, this Label with the spinner will show (labelSpinner.setEnabled(true)) in the right corner of the window.
Normally an Image can be added to a Label by labelSpinner.setImage(arg0).
If I add this SPINNER.GIF the normal way into the Label, it won't animate; it's only a static image.
Does anybody know how to add an animated GIF (for example some spinner) to a widget in an SWT-based Java SE app? I browsed many examples, but many of them were useless, and the good ones were too complicated.
I would like to do it very simply.
Can somebody help?
Did you try the Eclipse article about SWT Images?
This part will load the image and display it in a Canvas:
ImageLoader loader = new ImageLoader();
loader.load(getClass().getResourceAsStream("Idea_SWT_Animation.gif"));
Canvas canvas = new Canvas(shell,SWT.NONE);
image = new Image(display,loader.data[0]);
int imageNumber;
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener(){
public void paintControl(PaintEvent event){
event.gc.drawImage(image,0,0);
}
});
And this part updates the gif:
Thread thread = new Thread(){
public void run(){
long currentTime = System.currentTimeMillis();
int delayTime = loader.data[imageNumber].delayTime;
while(currentTime + delayTime * 10 > System.currentTimeMillis()){
// Wait till the delay time has passed
}
display.asyncExec(new Runnable(){
public void run(){
// Increase the variable holding the frame number
imageNumber = imageNumber == loader.data.length-1 ? 0 : imageNumber+1;
// Draw the new data onto the image
ImageData nextFrameData = loader.data[imageNumber];
Image frameImage = new Image(display,nextFrameData);
gc.drawImage(frameImage,nextFrameData.x,nextFrameData.y);
frameImage.dispose();
canvas.redraw();
}
});
}
};
shell.open();
thread.start();
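Note that ImageData.delayTime is in hundredths of a second, which is why the loop above multiplies it by 10 before comparing against System.currentTimeMillis().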
After trying at least 3 different animated GIF examples, none of which worked, I started working on my own, based mainly on the answer above.
Here is a complete running example including:
a base64 decoder (courtesy of http://www.source-code.biz/base64coder/java/ )
spinner GIF
main method
Remove the main method, base64 methods, and image data, and you will have a working animated GIF canvas.
import java.io.IOException;
import java.io.InputStream;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.ImageLoader;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
public class AnimatedGif extends Canvas {
public static void main(String[] args) throws IOException {
byte[] d = decode(b64.toCharArray(), 0, b64.length());
Display display = new Display ();
Shell shell = new Shell(display);
shell.setBounds(0, 0, 100, 100);
AnimatedGif gif = new AnimatedGif(shell, SWT.NONE);
gif.setLocation(10,10);
gif.setSize(16, 16);
gif.load(new java.io.ByteArrayInputStream(d));
shell.open();
gif.animate();
while (!shell.isDisposed ()) {
if (!display.readAndDispatch ()) display.sleep ();
}
display.dispose ();
}
private final ImageLoader loader = new ImageLoader();
private int img = 0;
private volatile boolean animating = false;
private Thread animateThread;
public AnimatedGif(Composite parent, int style) {
super(parent, style);
}
public void load (InputStream resource) throws IOException {
loader.load(resource);
}
public void animate() {
if (animateThread == null) {
animateThread = createThread();
animateThread.setDaemon(true);
}
if (animateThread.isAlive())
return;
animateThread.start();
}
public void stop() {
animating = false;
if (animateThread != null)
try {
animateThread.join();
animateThread = null;
} catch (InterruptedException e) {
// do nothing
}
}
private Thread createThread() {
return new Thread() {
long currentTime = System.currentTimeMillis();
final Display display = getParent().getDisplay();
public void run() {
animating = true;
while(animating) {
img = (img == loader.data.length-1) ? 0 : img + 1;
int delayTime = Math.max(50, 10*loader.data[img].delayTime);
long now = System.currentTimeMillis();
long ms = Math.max(currentTime + delayTime - now, 5);
currentTime += delayTime;
try {
Thread.sleep(ms);
} catch(Exception e) {
return;
}
if (!display.isDisposed())
display.asyncExec(new Runnable() {
@Override
public void run() {
ImageData nextFrameData = loader.data[img];
Image frameImage = new Image(display, nextFrameData);
new GC(AnimatedGif.this).drawImage(frameImage, nextFrameData.x, nextFrameData.y);
frameImage.dispose();
//canvas.redraw();
}
});
}
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
new GC(AnimatedGif.this).fillRectangle(
0,
0,
getBounds().width,
getBounds().height);
}
});
}
};
}
private static final char[] map1 = new char[64];
static {
int i = 0;
for (char c = 'A'; c <= 'Z'; c++)
map1[i++] = c;
for (char c = 'a'; c <= 'z'; c++)
map1[i++] = c;
for (char c = '0'; c <= '9'; c++)
map1[i++] = c;
map1[i++] = '+';
map1[i++] = '/';
}
private static final byte[] map2 = new byte[128];
static {
for (int i = 0; i < map2.length; i++)
map2[i] = -1;
for (int i = 0; i < 64; i++)
map2[map1[i]] = (byte) i;
}
public static byte[] decode(char[] in, int iOff, int iLen) {
if (iLen % 4 != 0)
throw new IllegalArgumentException(
"Length of Base64 encoded input string is not a multiple of 4.");
while (iLen > 0 && in[iOff + iLen - 1] == '=')
iLen--;
int oLen = (iLen * 3) / 4;
byte[] out = new byte[oLen];
int ip = iOff;
int iEnd = iOff + iLen;
int op = 0;
while (ip < iEnd) {
int i0 = in[ip++];
int i1 = in[ip++];
int i2 = ip < iEnd ? in[ip++] : 'A';
int i3 = ip < iEnd ? in[ip++] : 'A';
if (i0 > 127 || i1 > 127 || i2 > 127 || i3 > 127)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int b0 = map2[i0];
int b1 = map2[i1];
int b2 = map2[i2];
int b3 = map2[i3];
if (b0 < 0 || b1 < 0 || b2 < 0 || b3 < 0)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int o0 = (b0 << 2) | (b1 >>> 4);
int o1 = ((b1 & 0xf) << 4) | (b2 >>> 2);
int o2 = ((b2 & 3) << 6) | b3;
out[op++] = (byte) o0;
if (op < oLen)
out[op++] = (byte) o1;
if (op < oLen)
out[op++] = (byte) o2;
}
return out;
}
static String b64 =
"R0lGODlhEAAQAPf/AMra5KO9zOzz9s3b5MDS3aS+zZOwwsrZ4+bt89jm7IKftuLr" +
"8ZCswH6bs9vn7qrD0+Ts8oqnvYKetomlu8PV4t7p79Pg6dzp79vm7q/F1aC6zdTi" +
"6bbL2XqYsdDf55SwxK/H1ajC0Yajudzp7q7E087c59Xi6niWr8XY4qW/0LnM2aO9" +
"zoyov7bK2djk7ICbtIOhtt7p7pq1yc3b5sDU38DR39vm7LDH1afA0Yqovebu85Gt" +
"wouovsvZ5oCdtunv9IaiuXyZscrb5qnC0Zezxdjj64WiuZ65y7vQ3N/p8OLq8avE" +
"1MTX4o6rwJaxxLnN29jm7a7F1dvm7Y2qwH2bs7zQ3Jizx9Df6djj7HmXr+Tt8rvO" +
"26/E1LrO3KC5zKW+z8/e577Q3oCctYShubTK2HyasZGtwHuYsdTg6enw89/p76zE" +
"04iku+Lr8LXJ2JKvw+Xt8tHf583b57vP3pu2ycjY45m0yNvp7qzE1bHI13qZsaK8" +
"zn2aspq0yODp8Nfi6svb5oaiuuDp76vE09Hf6ZWzxMfY4qnC087e5+rx9eLs8pSv" +
"xL/R3rPI2Nfj64ajuq7E1Y2pv7rQ2+Lp8J65yarC1JKvxLzR3nybsrzQ3unw9HmY" +
"sLvR3LPI17nO28rY5Iqlvc3c5urx9Iakus/f53iVrtPi6cnY436btNTi6tfi67PK" +
"1rHI2H6bspy3yJ63ydXi7JezxKjA0ZGuwNjk64CbtazE0Zezx8DR3NXi68TX4Nvn" +
"7e7097vP24CctH2atIqlvN7o7tPh6Nzn7JGvwK/F1MbX4dHg6MfY4b7S3c/e5n2b" +
"srjN2ezy9eDq78rb4svb473R2+Ts8cHV3r7R3eXu8rfN2JSvwo+tv9/p7pSxw6C7" +
"ytbj6cTX36zD1NHg5vL3+KnB0Onx9cPV35y4yLPJ1dnl7LnO2cHU3avD0bnP2sDV" +
"4pKvwdfk7O7z99Xi6XqXsNTi6N3o7sjY4ubv8s7c5qvE0env842svuLs8Z64yJm0" +
"xZq2x46svuLq8P///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQFAAD/ACwAAAAAEAAQ" +
"AAAIvQD/CRR4poEEf0q0DFz478SZFwqAxPC3AAEChqU6GAQSwcHEihcFntAoQUQE" +
"BgkcVKCI4IdAkiYZfCiScmXFfzBzMFhYk6K/jScZ/uspSEFMJ0KxpIxxAQgPBk5k" +
"CDVRBIoDDE8/yNAg1MIGmglkyvDy5cHCEh4smIC14d/WLyGGQGpBA4UQtBYsCDyy" +
"AseDByBaqKhhqAfaKwNTPIiyBAQZFUjqGmY4KEMGEG5UdAlDgYlQgQ9IdOJQhRPD" +
"gAAh+QQFAAD/ACwAAAAAEAAQAAAI1gD/CRR4IkiZBjx0DFwoMIueBj58EBGlRAtD" +
"h1TEKAikgJUOfxAWbsKk0QgbFjmEJEkiEIFAVBLGjMoxZUcfG1AEhvz3YiOoSAz/" +
"3VHzD8EPIxNYMPgQVMqFSRAWsGjyhoidoAmkVFjgb8eHPq80BMWS4I4fQZboaFiB" +
"I6gJFxhGXDiyJ4WsIZUWItrwxwYGF/9ShHjwIEOLGkwAkbJgAgvgwHjwLLnB4QmF" +
"HnI8oNmwYSCJDCDycPBUA8UMMHGuMHxAQvSTTBTqzBASVCCIFlWQ1KDAMCAAIfkE" +
"BQAA/wAsAAAAABAAEAAACMsA/wkUeKZDmSANfAxc+K8BlRe+FNRSMIbKMoYwFCjI" +
"CAOGEY1iqgyMMIHkhJIRgE14JImXEoEMdpiJyYBBpBxN5mjyB+GfEytWiNyyspCR" +
"ohE8f2jQ4GUpw38JLvBE8CVFChxDnkadOmTQoRCQtEqFoKNYlAdrMohNyiVDlBst" +
"5ix0lAAphAX/uNwgoyIMDQpMdJWAgrSNQC6NODyZQ4NJnVAWCFeYNLDFli6MDM3w" +
"gCZXgl0XGFaRVAPFZjQmHEl5KhCaaQ+EKDAMCAAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"2wD/CRRIJUiHLCdODFz47xEMCT5eNOBjMAjDKTkisBFh5J+CiK3KDFy06I2ZJix4" +
"TJggYowCPr8EHqEjg4iTDzuaTGExQYEVQP9WCN3j5cjCKXYuIfihZEiIEDhwMBSo" +
"KMYCLSSiHAqxZKoNB0kW/MvAZQmJDFP/jfi3AMEaEDcacZgqZSACEjfIcOjCZKEL" +
"DGr+IdDxz42KJ0hQCEFkQVUCB34WEP6nYssWGqfkxDH1B8MIfwv8DUQShsKnEoQ2" +
"FMEQI4lohhR6gLFg4q+DCmn/zQBzZUMqGwwDAgAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"uAD/CRTIgAcQBQ06DFz474iMDwVFSEh4gmGKFRoeMogAZGKHUgMHPcDxxYsMJxsl" +
"UhSYIcqDEF8yosyh8sy/BxmWPBjyYGFKCS/OkABBFBLDfxsPNujkhkyLFkchcpTA" +
"QYVVGkeLJHAQw1+VLkhqoDi6tasSTmFoGBJSYuHWCv4WaPlHAUWPEh4smMDyNi4C" +
"gUzu5t2g1QHcBQj+CsSrtwgUw34VD7xiAVZZvz+OCtyQAMMFQf4YBgQAIfkEBQAA" +
"/wAsAAAAABAAEAAACNYA/wkUeMTSDhZGXgxc+C9Fij10PjSZoADVJoYk8IRIoaHP" +
"GxaBJGDKMvBBBjwPZK14RYQBqDFi9AwkAWLJgyE4NNj5EGmUAiokQeTJcyNDJYY5" +
"jMQ80YKD0xYM/01ho6BBkCpPPD2pEfVgIB9lkGSqQYFJ1D45FPhoQBZFD0BRbQhh" +
"RYQHhToz5JBCxBBKEh2idPybAcaDhQ0msCSQcudfEn9KBAqJg8bEHxeLL6gRCEHL" +
"wCsbsNjAcKfCpH8Qov7b4ALDCD8LICD4N1u1iwuC/C34wTAgACH5BAUAAP8ALAAA" +
"AAAQABAAAAjNAP8JFMil2JAvGpwMXPiPC5cMUQal0GCFQQSGLRrdiPLgUAovVnZM" +
"gDGwyhYOZG6sCYFDAxEzERQ0ECipyxMVLTJAGqLhFoMJCqj8o1CD0ZwwcxaGHPni" +
"DAoUhmjQYPjvpwJfHWZoZUKBKkwYCsp4GFunK8NIEWDUCkIIDZpQuqjmAGZEJgUT" +
"uSyUcLSQUROgY8b8c5QACpQEiBErmvMoqEApu0ZIvkD5wghNksQsG3ihgr/PoD/z" +
"qsJwUhsIqFNDUEJV4IIfOhAg+MEwIAAh+QQFAAD/ACwAAAAAEAAQAAAI1wD/CRRI" +
"Yk0GEkNWDFz4T4WbGyC4RAmx4sgihki2qCBzY8khinQWTRlIIcyWJxwakQiBY4+M" +
"NzkeCaRAgQaSLhwyLMHhhYiZCDD+zejx6RQKJguPOGnCRgIVMGBKyBHC8N8HFiJ8" +
"BLligVAcRFV38DDyosMGExtMWajaZMK/BllSuSjyR1XVKRMU8DlhA4PfBC4WTsGq" +
"9cQ/BzFGOMAg5Z+Nf3YmjCkrsEISf37U/FOs6JICBa2CDPS3YMHAJDEQWOFThqE/" +
"HQj+lV7wA9CvqgJhx9aihGFAACH5BAUAAP8ALAAAAAAQABAAAAjAAP8JFMipCodO" +
"JB4MXPiPCYUwXVS4AZEhwyCGJXqgoIFEBRkQS6I8SDHwioeMhmqoaAHiwQMcK44I" +
"tGDhpJCNLSANCfFFg4x/G2CZqFli4YMvXmR8YJAgQZENFhj+87mUBwYHUIqYkCrD" +
"CQMeQC7EcJAAi1SvEUQoEOSvAlmpDCIAkdDA34K2bwcyyPGPbod/CO66dbo0Ld2B" +
"PwLjTRBXhN8TAxEgEDhWrt9SDCXfjQFEwYszkKX+06LEH90zDAMCACH5BAkAAP8A" +
"LAAAAAAQABAAAAjWAP8JFEihBpIqLUAMXPhPyIw6FDI94ZCHxAOGV+KAmYGihieK" +
"IDKQGLhhAxoPcnpQmHhjCR48KQS6wGLCAilATGq0yPDgQYiYLjDY+LMB0cJKQ2Sl" +
"2HPkwggMLkww/IdjhQY6lgT5uZMAy1QNr/p82OFvQQUpCabaIfKmCYsFECZdkDL1" +
"AwMWE4z8QPBPzZ2pkUAFUvBCIASBUGz02TElx6gxElAJ5PsvSRIhOViwMaJADKZN" +
"CyH408FKwWAxVPRkYahFiSgiPnw0UD11oA4eDcoEOcEwIAAh+QQJAAD/ACwAAAAA" +
"EAAQAAAIyAD/CRw4zAYtYdwGKvyXz1m2YAaF1XsGbWE1aW3UQKQVb8CBA8YG3qMH" +
"Z4E8c++OKQMwD9k4XALTpClp7tsxD8oGIOs2DaamRDPbOFMIwBi6ZARwuQMKp83C" +
"fwSiVUEqgKnThdTU9YrWq5nVp73OMWNmzWvQp6tWlUv7j6m4AQpt2Utny1a4tmnc" +
"AQhAaVuAAAUABxaoCdwzfq70UaJEzpUrSo8HQjNQqNC+WLEKZda8cNYsIoWuFTLA" +
"bhaxpwK1EWOADV+/hQEBACH5BAUAAP8ALAAAAAAQABAAAAjcAP8JFGgj1YYrYGYM" +
"XPivggMMLkxYANODAsN//pLEwFBkA6ESnyiEQTLQ3wJ/IzD8MRVHzikaW7aoEKhj" +
"gR8HCVRZQCQEBZInKtz804HAnxqIC5l04UDmBgkEEPxdkHKRQ6MbINYgODkiwcUM" +
"JJZwybA1iQMbF5eEOBSFhJYFMRR5ZYgDR4gQQ5T8QHDJzpSFR7zsWUH4HyArCiaw" +
"mNJkxwcnRGTQOSLwFx8FY0RMmMCDRRMzbxYtGlim1QsfCmAYEcEmQo6/C4N0CMKn" +
"wWkJMB5dFHjiRJbZVBgGBAAh+QQFAAD/ACwAAAAAEAAQAAAIuQD/CRToT9AFDAk2" +
"DFz47weCBf5iOEgAy8IVhgge+qvgAEoRExY8lBiYESLHBEU2hCzRg4lAjSexgBTZ" +
"AwWFf1pMTlxYQoghGmE4KYm4kyGKGki6VCGKkuE/GiqicpAAJAKDD05btCDjplMD" +
"BVUZOIUEoiyJMy8kiLC68MGQB0syPPh3poHaHAycyNDwJcSDKBkEnuhgd21eGV6+" +
"4HgwaGApwlStfti7IgXDwYV5XJVxxKlAwmA1MwwIACH5BAUAAP8ALAAAAAAQABAA" +
"AAjVAP8JFPhjgT9BF1wMXPgPQUMIC/yMwOBiA0OBEP5NqnAHgw0sG64M1JLxn5oL" +
"UhK4+GMCTRwhApX4S/LvTkosJjZY8ABmxj8donQkgcIQESk5M+pQ4EGElRAbFwH1" +
"QEGhRgMfCnL0ucikaiYkZXwEYrHjYo0nnp5UCdJAAZspF1twmNvihB4xRnIwrJTh" +
"Rp48IP5loaJgVKQPdjTgGPJgCQgSA++OAcWAyKsVsh7gyfBgYBZMEsa+6aMhRQg8" +
"kBduQqVgQpMPdPakSHFR4AsjZC0dYRgQACH5BAUAAP8ALAAAAAAQABAAAAjRAP8J" +
"FFgBAQIdP2wMXPhPCYSHECG0GcaQBi9/GDNirNBuIA8xkjSNuEDywoiTuzYIpKLg" +
"0RxFCWLGhAIlgaN/D4womNCE0UJHJSzkMpHphQIjwFgw/KcrFBo0hCTUghHBy1JG" +
"dTx4CNVAAVUiS9cxmTGjRxlfO80spUHDEAoUE17AmLDDysIuYeYwqnHpH8sJDG5p" +
"CFGJSwsVT7pIEtg1ghkiGnCEWHODDIctWwbOreslxaEHUW40usEwAgMrGlIMipKB" +
"y5qlAp1o+DKExBeGAQEAIfkEBQAA/wAsAAAAABAAEAAACNcA/wkUqETLPwQIByr8" +
"xwPQjwUQD+rAsBATHysIYiQZCNHfwBWtFCi4pMjBiH9q/PhLkkBghxdjJtj5Z+Of" +
"FAwmYzj4xyaIDxEsmih0kQCDUSzw+CiYsGPhP1V/irhIlaXBvwmWnFowtcHElTIv" +
"jPD44FRIHEIWrvgESnahEDklwMygIoFNEydHFFJAcepTjzr/YEQwQ8QLjgfeOHRB" +
"QoMCBYFAcryRsQdHCBKNODzZEobDwCmL6KwIcWjJDTIqtiBZaObI6ChcQNxw48ap" +
"wBVDSGRYs2ZhQAA7";
}
Here is sample code. Basically this is how it works: unlike other image formats, an animated GIF has a set of ImageData objects (each one like a frame in an animation). You render this image data on a Canvas with the delay that you want.
http://www.java2s.com/Code/Java/SWT-JFace-Eclipse/DisplayananimatedGIF.htm
This one is also working example code based upon the Eclipse article about SWT. I was trying to make a slideshow or animated GIF dialog from images located in a folder. Posted here because it could be useful for someone.
public class GifDialog extends Dialog {
Shell dialog;
private Canvas canvas;
int numberImage = 1;
private volatile boolean running = true;
final List<Image> imageCollection = new ArrayList<Image>();
GifDialog(Shell parent) { // constructor name must match the class name
super(parent);
}
public String open() {
Shell parent = getParent();
this.func();
dialog = new Shell(parent, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL);
dialog.setSize(600, 400);
dialog.setText("Show Begins!!!");
Monitor primary = dialog.getDisplay().getPrimaryMonitor();
Rectangle bounds = primary.getBounds();
Rectangle rect = dialog.getBounds();
int x = bounds.x + (bounds.width - rect.width) / 2;
int y = bounds.y + (bounds.height - rect.height) / 2;
dialog.setLocation(x, y);
dialog.setLayout(new FillLayout());
final Canvas canvas = new Canvas(dialog, SWT.NONE);
final Image image = new Image(dialog.getDisplay(), imageCollection.get(
0).getImageData());
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener() {
public void paintControl(PaintEvent event) {
event.gc.drawImage(image, 0, 0);
}
});
Thread thread = new Thread() {
public void run() {
while (running) {
dialog.getDisplay().asyncExec(new Runnable() {
public void run() {
numberImage = numberImage == imageCollection.size() - 1
? 0 : numberImage + 1;
ImageData nextFrameData = imageCollection.get(
numberImage).getImageData();
Image frameImage = new Image(dialog.getDisplay(),
nextFrameData);
gc.drawImage(frameImage, nextFrameData.x,
nextFrameData.y);
frameImage.dispose();
canvas.redraw();
if (numberImage == 0)
try {
running = false;
} catch (Exception e) {
e.printStackTrace();
}
try {
Thread.sleep(200);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
}
}
};
dialog.open();
thread.start();
Display display = parent.getDisplay();
while (!dialog.isDisposed()) {
if (!display.readAndDispatch())
display.sleep();
}
return "After Dialog";
}
public void func() {
File path = new File("..\\folder"); // "\f" would be a form-feed escape, so the backslash must be doubled
File[] files = path.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].isFile()) { // this line weeds out other
// directories/folders
try {
ImageData imageData = new ImageData(
new ByteArrayInputStream(loadImage(files[i])));
final Image image = new Image(Display.getDefault(),
imageData);
imageCollection.add(image);
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
public byte[] loadImage(File file) throws IOException {
BufferedImage image = ImageIO.read(file);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ImageIO.write(image, "jpg", bos);
return bos.toByteArray();
}
public Canvas getCanvas() {
return canvas;
}
public void setCanvas(Canvas canvas) {
this.canvas = canvas;
}
Note: None of the examples that rely on SWT's ImageLoader class will work on GTK Linux SWT, as there is currently a bug that hard-codes the maximum number of frames to 32 and sets the delay between frames incorrectly.
See GTK ImageLoader.java
// Fix the number of GIF frames as GdkPixbufAnimation does not provide an API to
// determine number of frames.
int num_frames = 32;
