I'm new to JavaCV. I'm trying to create a face recognition system and have run into a problem of low recognition accuracy. I use EigenFaceRecognizer for identification and the Georgia Tech Face Database as the photo database. All my photos have a resolution of 640x480 (12 photos per person, 20 people in total).
The result is very unstable and sometimes it doesn't work at all. How can I increase the recognition accuracy? Should I do any extra editing of the photos, or is the problem in my code?
Train method
public void train() {
    File imagesDir = new File("C:\\Java_Eclipse\\FaceRecognitionWebcam\\src\\image");
    FilenameFilter imgFilter = new FilenameFilter() {
        public boolean accept(File dir, String name) {
            name = name.toLowerCase();
            return name.endsWith(".jpg") || name.endsWith(".pgm") || name.endsWith(".png");
        }
    };
    File[] imageFiles = imagesDir.listFiles(imgFilter);
    MatVector images = new MatVector(imageFiles.length);
    Mat labels = new Mat(imageFiles.length, 1, CV_32SC1);
    IntBuffer labelsBuf = labels.createBuffer();
    int counter = 0;
    for (File image : imageFiles) {
        Mat img = imread(image.getAbsolutePath(), CV_LOAD_IMAGE_GRAYSCALE);
        int label = Integer.parseInt(image.getName().split("\\-")[0]);
        images.put(counter, img);
        labelsBuf.put(counter, label);
        counter++;
    }
    // FaceRecognizer faceRecognizer = FisherFaceRecognizer.create();
    FaceRecognizer faceRecognizer = EigenFaceRecognizer.create();
    // FaceRecognizer faceRecognizer = LBPHFaceRecognizer.create();
    System.out.println("Train started");
    faceRecognizer.train(images, labels);
    faceRecognizer.save("C:\\Java_Eclipse\\FaceRecognitionWebcam\\src\\train_result_eigen.xml");
    System.out.println("Train completed");
}
Recognition method
public void findFaces(IplImage currentFrame) throws InterruptedException {
    IntPointer labels = new IntPointer(1);
    DoublePointer confidences = new DoublePointer(1);
    opencv_core.CvMemStorage storage = new opencv_core.CvMemStorage().create();
    opencv_core.CvSeq faces =
            cvHaarDetectObjects(currentFrame, classifierFace, storage, 1.6, 8, opencv_objdetect.CV_HAAR_DO_CANNY_PRUNING);
    int total = faces.total();
    if (total > 0) {
        System.out.println("Total faces: " + total);
        for (int i = 0; i < total; i++) {
            opencv_core.CvRect r = new opencv_core.CvRect(cvGetSeqElem(faces, i));
            int x = r.x(); int y = r.y(); int w = r.width(); int h = r.height();
            IplImage greyImg = IplImage.create(currentFrame.width(), currentFrame.height(), IPL_DEPTH_8U, 1);
            cvCvtColor(currentFrame, greyImg, CV_RGB2GRAY); // resize
            rectangle(cvarrToMat(currentFrame), new Rect(x, y, w, h), new Scalar(0, 255, 0, 0), 2, 0, 0);
            faceRecognizer.predict(cvarrToMat(greyImg), labels, confidences);
            int label = labels.get(0);
            double confidence = confidences.get(0);
            String labelInfo = faceRecognizer.getLabelInfo(label).toString();
            System.out.println("---------");
            System.out.println("Person: " + i);
            System.out.println("label = " + label);
            System.out.println("confidence = " + confidence);
        }
    }
}
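For reference, here is a rough sketch of the kind of "extra editing" I was wondering about: cropping the detected rectangle out of the grey image and resizing it to one fixed size before predict(). The helper name cropAndResize, the FACE_WIDTH/FACE_HEIGHT constants and the equalizeHist() call are only placeholders and guesses of mine, not something from a working setup (resize() and equalizeHist() would come from opencv_imgproc):

// Rough sketch, not verified: every Mat passed to predict() should have the same
// size as the training images, so crop the detected face and resize it first.
Mat cropAndResize(Mat greyFrame, int x, int y, int w, int h) {
    Mat face = new Mat(greyFrame, new Rect(x, y, w, h)); // face region only
    Mat resized = new Mat();
    resize(face, resized, new Size(FACE_WIDTH, FACE_HEIGHT)); // placeholder constants
    equalizeHist(resized, resized); // reduce lighting differences between frames
    return resized;
}

Then the predict call would become faceRecognizer.predict(cropAndResize(cvarrToMat(greyImg), x, y, w, h), labels, confidences); — but I'm not sure whether this alone is enough.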
Related
I am working on some examples I found online to understand more about YOLO usage in Java. I got this code, which I edited a little, and it can detect objects in videos, but now I want to do it with pictures and I am struggling with it. I would appreciate it if anyone could show me how to edit it, or has advice or a method to solve it.
The code:
class yolo {
private static List<String> getOutputNames(Net net) {
List<String> names = new ArrayList<>();
List<Integer> outLayers = net.getUnconnectedOutLayers().toList();
List<String> layersNames = net.getLayerNames();
outLayers.forEach((item) -> names.add(layersNames.get(item - 1)));//unfold and create R-CNN layers from the loaded YOLO model//
return names;
}
public static void main(String[] args) throws InterruptedException {
System.load("C:\\Users\\LENOVO\\Desktop\\Java1\\Yolo\\opencv\\build\\java\\x64\\opencv_java400.dll");
System.out.println("Library Loaded");
System.load("C:\\Users\\LENOVO\\Desktop\\Java1\\Yolo\\opencv\\build\\java\\x64\\opencv_java400.dll");
String modelWeights = "C:\\Users\\LENOVO\\Desktop\\Java1\\Yolo\\yolov3.weights";
String modelConfiguration = "C:\\Users\\LENOVO\\Desktop\\Java1\\Yolo\\yolov3.cfg.txt";
String filePath = "C:\\Users\\LENOVO\\Desktop\\cows.mp4";
VideoCapture cap = new VideoCapture(filePath);
Mat frame = new Mat();
Mat dst = new Mat ();
//cap.read(frame);
JFrame jframe = new JFrame("Video");
JLabel vidpanel = new JLabel();
jframe.setContentPane(vidpanel);
jframe.setSize(600, 600);
jframe.setVisible(true);
Net net = Dnn.readNetFromDarknet(modelConfiguration, modelWeights);
//Thread.sleep(5000);
//Mat image = Imgcodecs.imread("D:\\yolo-object-detection\\yolo-object-detection\\images\\soccer.jpg");
Size sz = new Size(288,288);
List<Mat> result = new ArrayList<>();
List<String> outBlobNames = getOutputNames(net);
while (true) {
if (cap.read(frame)) {
Mat blob = Dnn.blobFromImage(frame, 0.00392, sz, new Scalar(0), true, false);
net.setInput(blob);
net.forward(result, outBlobNames);
// outBlobNames.forEach(System.out::println);
// result.forEach(System.out::println);
float confThreshold = 0.6f;
List<Integer> clsIds = new ArrayList<>();
List<Float> confs = new ArrayList<>();
List<Rect> rects = new ArrayList<>();
for (int i = 0; i < result.size(); ++i)
{
Mat level = result.get(i);
for (int j = 0; j < level.rows(); ++j)
{
Mat row = level.row(j);
Mat scores = row.colRange(5, level.cols());
Core.MinMaxLocResult mm = Core.minMaxLoc(scores);
float confidence = (float)mm.maxVal;
Point classIdPoint = mm.maxLoc;
if (confidence > confThreshold)
{
int centerX = (int)(row.get(0,0)[0] * frame.cols());
int centerY = (int)(row.get(0,1)[0] * frame.rows());
int width = (int)(row.get(0,2)[0] * frame.cols());
int height = (int)(row.get(0,3)[0] * frame.rows());
int left = centerX - width / 2;
int top = centerY - height / 2;
clsIds.add((int)classIdPoint.x);
confs.add((float)confidence);
rects.add(new Rect(left, top, width, height));
}
}
}
float nmsThresh = 0.5f;
MatOfFloat confidences = new MatOfFloat(Converters.vector_float_to_Mat(confs));
Rect[] boxesArray = rects.toArray(new Rect[0]);
MatOfRect boxes = new MatOfRect(boxesArray);
MatOfInt indices = new MatOfInt();
Dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThresh, indices);
int [] ind = indices.toArray();
int j=0;
for (int i = 0; i < ind.length; ++i)
{
int idx = ind[i];
Rect box = boxesArray[idx];
Imgproc.rectangle(frame, box.tl(), box.br(), new Scalar(0,0,255), 2);
//i=j;
System.out.println(idx);
}
// Imgcodecs.imwrite("D://out.png", image);
//System.out.println("Image Loaded");
ImageIcon image = new ImageIcon(Mat2bufferedImage(frame));
vidpanel.setIcon(image);
vidpanel.repaint();
// System.out.println(j);
//System.out.println("Done");
}
}
}
// }
private static BufferedImage Mat2bufferedImage(Mat image) {
MatOfByte bytemat = new MatOfByte();
Imgcodecs.imencode(".jpg", image, bytemat);
byte[] bytes = bytemat.toArray();
InputStream in = new ByteArrayInputStream(bytes);
BufferedImage img = null;
try {
img = ImageIO.read(in);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return img;
    }
}
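The direction I was thinking of (not sure if it is right) is to drop the VideoCapture loop and run the same network once on a single picture, roughly like this. The file paths are placeholders, and the confidence filtering / NMS part would stay exactly as in the loop above:

Mat frame = Imgcodecs.imread("C:\\Users\\LENOVO\\Desktop\\picture.jpg"); // placeholder path
if (frame.empty()) {
    System.out.println("Could not read the picture");
    return;
}
Mat blob = Dnn.blobFromImage(frame, 0.00392, new Size(288, 288), new Scalar(0), true, false);
net.setInput(blob);
List<Mat> result = new ArrayList<>();
net.forward(result, getOutputNames(net));
// ... same clsIds/confs/rects loop and Dnn.NMSBoxes call as in the video version ...
Imgcodecs.imwrite("C:\\Users\\LENOVO\\Desktop\\out.png", frame); // save instead of showing a JFrame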
I am comparing one image with a group of images to know if they are similar.
I'm using OpenCV 4.1.2 in Java code. Basically I need to know if one image looks like another one.
I found this code on the internet, but I can't execute it.
public static void main(String[] args) {
try {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
String trainingDir = "C:/PROYECTO/RECONOCIMIENTO FACIAL/fotos";
BufferedImage img = null;
img = ImageIO.read(new File("C:/PROYECTO/RECONOCIMIENTO FACIAL/cara2.jpg"));
OpenCVFrameConverter.ToMat cv = new OpenCVFrameConverter.ToMat();
Mat testImage = cv.convertToMat(new Java2DFrameConverter().convert(img));
File root = new File(trainingDir);
FilenameFilter imgFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
name = name.toLowerCase();
return name.endsWith(".jpg") || name.endsWith(".pgm") || name.endsWith(".png");
}
};
File[] imageFiles = root.listFiles(imgFilter);
MatVector images = new MatVector(imageFiles.length);
Mat labels = new Mat(imageFiles.length, 1, CvType.CV_32SC1);
IntBuffer labelsBuf = labels.createBuffer();
int counter = 0;
for (File image : imageFiles) {
BufferedImage img2 = null;
img2 = ImageIO.read(new File(image.getAbsolutePath()));
OpenCVFrameConverter.ToMat cv2 = new OpenCVFrameConverter.ToMat();
Mat img3 = cv2.convertToMat(new Java2DFrameConverter().convert(img2));
// img3 = new Mat(Imgcodecs.IMREAD_GRAYSCALE);
// Mat img = imread(image.getAbsolutePath(), Imgcodecs.IMREAD_GRAYSCALE);
int label = image.getName().codePointAt(0);
images.put(counter, img3);
labelsBuf.put(counter, label);
counter++;
}
FaceRecognizer faceRecognizer = FisherFaceRecognizer.create();
// FaceRecognizer faceRecognizer = EigenFaceRecognizer.create();
// FaceRecognizer faceRecognizer = LBPHFaceRecognizer.create();
faceRecognizer.train(images, labels);
IntPointer label = new IntPointer(1);
DoublePointer confidence = new DoublePointer(1);
faceRecognizer.predict(testImage, label, confidence);
int predictedLabel = label.get(0);
System.out.println("Predicted label: " + predictedLabel);
} catch (Exception e) {
e.printStackTrace();
}
}
It gives me this error:
OpenCV(4.1.2) C:\projects\javacpp-presets\opencv\cppbuild\windows-x86_64\opencv_contrib-4.1.2\modules\face\src\lbph_faces.cpp:406: error: (-5:Bad argument) This LBPH model is not computed yet. Did you call the train method? in function 'cv::face::LBPH::predict'
at org.bytedeco.opencv.opencv_face.FaceRecognizer.predict(Native Method)
at ec.reconocimiento.pk.testv2.main(testv2.java:60)
Please tell me if you know where the error is or what I'm doing wrong.
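One thing I am not sure about is whether all the images have to be grayscale and the same size before train() is called. A rough sketch of how I would load them that way inside the for loop above, in case that is the problem (the 200x200 size is just a guess of mine; imread and resize are the ones from opencv_imgcodecs and opencv_imgproc):

// Rough sketch: load each training image as single-channel grayscale and force one size.
Mat img3 = opencv_imgcodecs.imread(image.getAbsolutePath(), opencv_imgcodecs.IMREAD_GRAYSCALE);
opencv_imgproc.resize(img3, img3, new Size(200, 200)); // same size for every image
images.put(counter, img3);

The test image (testImage) would then need the same grayscale conversion and resize before predict().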
I want only the images, at their respective positions and with the exact layout as in the PDF, but I don't want any text rendered. Is there any way to do this? Currently I am working in the way shown below, but the text also comes out this way, so is there any way to meet that requirement?
File sourceFile=new File(pdfFile);
String fileName = sourceFile.getName().replace(".pdf", "");
int pageNumber = 1;
for (PDPage page : li)
{
BufferedImage image = page.convertToImage();
File outputfile = new File(imgDes + fileName +"_"+ pageNumber +".png");
System.out.println("Image Created -> "+ outputfile.getName());
ImageIO.write(image, "png", outputfile);
pageNumber++;
}
Derive a class from PageDrawer and override all methods that don't deal with images with empty implementations, then call drawPage(). I just overrode processTextPosition() and didn't bother with lines, shapes, etc., but I think it is clear what I mean.
public class MyPageDrawer extends PageDrawer
{
public MyPageDrawer() throws IOException
{
}
@Override
protected void processTextPosition(TextPosition text)
{
}
// taken from PDPage.convertToImage, with extra parameter and one modification
static BufferedImage convertToImage(PDPage page, int imageType, int resolution) throws IOException
{
final Color TRANSPARENT_WHITE = new Color(255, 255, 255, 0);
final int DEFAULT_USER_SPACE_UNIT_DPI = 72;
PDRectangle cropBox = page.findCropBox();
float widthPt = cropBox.getWidth();
float heightPt = cropBox.getHeight();
float scaling = resolution / (float) DEFAULT_USER_SPACE_UNIT_DPI;
int widthPx = Math.round(widthPt * scaling);
int heightPx = Math.round(heightPt * scaling);
Dimension pageDimension = new Dimension((int) widthPt, (int) heightPt);
int rotationAngle = page.findRotation();
// normalize the rotation angle
if (rotationAngle < 0)
{
rotationAngle += 360;
}
else if (rotationAngle >= 360)
{
rotationAngle -= 360;
}
// swap width and height
BufferedImage retval;
if (rotationAngle == 90 || rotationAngle == 270)
{
retval = new BufferedImage(heightPx, widthPx, imageType);
}
else
{
retval = new BufferedImage(widthPx, heightPx, imageType);
}
Graphics2D graphics = (Graphics2D) retval.getGraphics();
graphics.setBackground(TRANSPARENT_WHITE);
graphics.clearRect(0, 0, retval.getWidth(), retval.getHeight());
if (rotationAngle != 0)
{
int translateX = 0;
int translateY = 0;
switch (rotationAngle)
{
case 90:
translateX = retval.getWidth();
break;
case 270:
translateY = retval.getHeight();
break;
case 180:
translateX = retval.getWidth();
translateY = retval.getHeight();
break;
default:
break;
}
graphics.translate(translateX, translateY);
graphics.rotate((float) Math.toRadians(rotationAngle));
}
graphics.scale(scaling, scaling);
PageDrawer drawer = new MyPageDrawer(); // MyPageDrawer instead of PageDrawer
drawer.drawPage(graphics, page, pageDimension);
drawer.dispose();
graphics.dispose();
return retval;
}
public static void main(String[] args) throws IOException
{
String filename = "......./blah.pdf";
// open the document
PDDocument doc = PDDocument.loadNonSeq(new File(filename), null);
List<PDPage> pages = doc.getDocumentCatalog().getAllPages();
for (int p = 0; p < pages.size(); ++p)
{
PDPage page = pages.get(p);
BufferedImage bim = convertToImage(page, BufferedImage.TYPE_INT_RGB, 300);
boolean b = ImageIOUtil.writeImage(bim, "page-" + (p + 1) + ".png", 300);
if (!b)
{
// error handling
}
}
doc.close();
}
}
Trying to add text to a multi-page (multiple image) TIFF file.
During the process the text is added correctly, but the created image is not equal to the original: the zoom (scale) of the content changes. I do not know how to make it equal to the original image.
This is the code I use:
File file = new File("AA005E57.tif");
SeekableStream seekableStream = new FileSeekableStream(file);
BufferedImage bsrc = ImageIO.read(file);
ImageDecoder decoder = ImageCodec.createImageDecoder("tiff", seekableStream, null);
int numPages = decoder.getNumPages();
BufferedImage image[]= new BufferedImage[numPages];
for(int i=0;i<decoder.getNumPages();i++){
PlanarImage op1 = new NullOpImage(decoder.decodeAsRenderedImage(i), null, null, OpImage.OP_IO_BOUND);
BufferedImage pg1 = convertRenderedImage(op1);// (new BufferedImage(op1.getWidth(), op1.getHeight(),BufferedImage.TYPE_BYTE_BINARY));
image[i] = pg1;
Graphics2D g2 = image[i].createGraphics();
float pageWidthInch = image[i].getWidth() * 72 / 200;
float pageHeightInch = image[i].getHeight() * 72 / 100;
g2.scale(pageWidthInch, pageHeightInch);
if (i == 0 ){
Font font = new Font("Helvetica", Font.BOLD, 12);
g2.setColor(Color.black);
g2.setFont(font);
g2.drawString("RADICADO 1234567890214365-D", 25,pg1.getHeight()-25);
}
g2.drawImage(image[i], (image[i].getWidth()),image[i].getHeight() ,null);
g2.dispose();
}
save(image,"C:/Prueb-18.tif");
Code for convertRenderedImage:
if (img instanceof BufferedImage) {
return (BufferedImage)img;
}
ColorModel cm = img.getColorModel();
float pageWidthInch = img.getWidth() * 72 / 200;
float pageHeightInch = img.getHeight() * 72 / 100;
WritableRaster raster = cm.createCompatibleWritableRaster( (int)pageWidthInch,(int)pageHeightInch);
boolean isAlphaPremultiplied = cm.isAlphaPremultiplied();
Hashtable properties = new Hashtable();
String[] keys = img.getPropertyNames();
if (keys!=null) {
for (int i = 0; i < keys.length; i++) {
properties.put(keys[i], img.getProperty(keys[i]));
}
}
BufferedImage result = new BufferedImage(cm, raster, false, properties);
img.copyData(raster);
return result;
Code for Save method:
Iterator writers = ImageIO.getImageWritersByFormatName("TIFF");
if (writers == null || !writers.hasNext()) {
throw new RuntimeException("No writers are available.");
}
FileImageOutputStream fios = new FileImageOutputStream(new File(tif));
ImageWriter writer = (ImageWriter) writers.next();
writer.setOutput(fios);
writer.prepareWriteSequence(null);
ImageWriteParam param = writer.getDefaultWriteParam();
param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
param.setCompressionType("CCITT T.4");
for (int i = 0; i < b.length; i++) {
ImageTypeSpecifier imageType = ImageTypeSpecifier.createFromRenderedImage(b[i]);
IIOMetadata imageMetadata = writer.getDefaultImageMetadata(imageType, param);
imageMetadata = createImageMetadata(imageMetadata);
writer.writeToSequence(new IIOImage(b[i], null, imageMetadata),param);
}
writer.endWriteSequence();
writer.dispose();
writer = null;
fios.close();
}
Code for createImageMetadata:
char[] COMPRESSION = new char[] { (char) BaselineTIFFTagSet.COMPRESSION_CCITT_T_4 };
char[] INCH_RESOLUTION_UNIT = new char[] { 2 };
char[] BITS_PER_SAMPLE = new char[] { 1 };
long[][] X_DPI_RESOLUTION = new long[][] { { 200, 1 } };
long[][] Y_DPI_RESOLUTION = new long[][] { { 200, 1 } };
TIFFDirectory ifd = TIFFDirectory.createFromMetadata(imageMetadata);
BaselineTIFFTagSet base = BaselineTIFFTagSet.getInstance();
TIFFTag tagResUnit = base.getTag(BaselineTIFFTagSet.TAG_RESOLUTION_UNIT);
TIFFTag tagXRes = base.getTag(BaselineTIFFTagSet.TAG_X_RESOLUTION);
TIFFTag tagYRes = base.getTag(BaselineTIFFTagSet.TAG_Y_RESOLUTION);
TIFFTag tagBitSample = base.getTag(BaselineTIFFTagSet.TAG_BITS_PER_SAMPLE);
TIFFTag tagRowStrips = base.getTag(BaselineTIFFTagSet.TAG_ROWS_PER_STRIP);
TIFFTag tagCompression = base.getTag(BaselineTIFFTagSet.TAG_COMPRESSION);
TIFFField fieldResUnit = new TIFFField(tagResUnit, TIFFTag.TIFF_SHORT, 1, INCH_RESOLUTION_UNIT);
TIFFField fieldXRes = new TIFFField(tagXRes, TIFFTag.TIFF_RATIONAL, 1, X_DPI_RESOLUTION);
TIFFField fieldYRes = new TIFFField(tagYRes, TIFFTag.TIFF_RATIONAL, 1, Y_DPI_RESOLUTION);
TIFFField fieldBitSample = new TIFFField(tagBitSample, TIFFTag.TIFF_SHORT, 1, BITS_PER_SAMPLE);
TIFFField fieldCompression = new TIFFField(tagCompression, TIFFTag.TIFF_SHORT, 1, COMPRESSION);
ifd.addTIFFField(fieldResUnit);
ifd.addTIFFField(fieldXRes);
ifd.addTIFFField(fieldYRes);
ifd.addTIFFField(fieldBitSample);
ifd.addTIFFField(fieldCompression);
return ifd.getAsMetadata();
}
The following code is inconsistent:
float pageWidthInch = image[i].getWidth() * 72 / 200;
float pageHeightInch = image[i].getHeight() * 72 / 100;
Is your source DPI 200x100? Be consistent on input; choose 100 or 200. This might be the source of your problem, especially since you save both as 200 for output. Both should likely be the same value, unless your source uses a different DPI for height and width.
Note that this code appears twice in your submitted code above.
If you need to, use AsTiffTagViewer (free application I have found invaluable for examining TIFF files) on your source image to confirm the appropriate values of each page.
Alternatively, after further review, you do this:
g2.scale(pageWidthInch, pageHeightInch);
Have you considered leaving out this call to g2.scale()?
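For example, if the source really is 200 DPI in both directions, the two lines would read as follows. This is only illustrating the consistency point, not tested against your file:

// Assuming the source TIFF is 200 DPI in both directions; adjust the 200 if your file differs.
float pageWidthInch  = image[i].getWidth()  * 72f / 200f;
float pageHeightInch = image[i].getHeight() * 72f / 200f;
// ...and consider dropping g2.scale(pageWidthInch, pageHeightInch) entirely,
// so drawString()/drawImage() work in the page's own pixel coordinates.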
I need to add an animated GIF, in the form of a spinner, to some SWT widget such as a Label. This Label will be labelSpinner.setEnabled(false) by default. When I start some long-running operation, this Label with the spinner will be shown (labelSpinner.setEnabled(true)) in the right corner of the window.
Normally an Image can be added to a Label with labelSpinner.setImage(arg0).
If I add this SPINNER.GIF to the Label the normal way, it won't animate; it's only a static image.
Does anybody know how to add an animated GIF (for example some spinner) to a widget in an SWT-based Java SE app? I browsed many examples, but many of them were useless, and the good ones were too complicated.
I would like to do it very simply.
Can somebody help?
Did you try the Eclipse article about SWT Images?
This part will load the image and display it in a Canvas:
ImageLoader loader = new ImageLoader();
loader.load(getClass().getResourceAsStream("Idea_SWT_Animation.gif"));
Canvas canvas = new Canvas(shell,SWT.NONE);
image = new Image(display,loader.data[0]);
int imageNumber;
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener(){
public void paintControl(PaintEvent event){
event.gc.drawImage(image,0,0);
}
});
And this part updates the gif:
Thread thread = new Thread(){
public void run(){
long currentTime = System.currentTimeMillis();
int delayTime = loader.data[imageNumber].delayTime;
while(currentTime + delayTime * 10 > System.currentTimeMillis()){
// Wait till the delay time has passed
}
display.asyncExec(new Runnable(){
public void run(){
// Increase the variable holding the frame number
imageNumber = imageNumber == loader.data.length-1 ? 0 : imageNumber+1;
// Draw the new data onto the image
ImageData nextFrameData = loader.data[imageNumber];
Image frameImage = new Image(display,nextFrameData);
gc.drawImage(frameImage,nextFrameData.x,nextFrameData.y);
frameImage.dispose();
canvas.redraw();
}
});
}
};
shell.open();
thread.start();
After trying at least 3 different animated GIF examples, none of which worked, I started working on my own, based mainly on the answer above.
Here is a complete running example including:
a base64 decoder (courtesy of http://www.source-code.biz/base64coder/java/ )
spinner GIF
main method
Remove the main method, base64 methods, and image data, and you will have a working animated GIF canvas.
import java.io.IOException;
import java.io.InputStream;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.ImageLoader;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
public class AnimatedGif extends Canvas {
public static void main(String[] args) throws IOException {
byte[] d = decode(b64.toCharArray(), 0, b64.length());
Display display = new Display ();
Shell shell = new Shell(display);
shell.setBounds(0, 0, 100, 100);
AnimatedGif gif = new AnimatedGif(shell, SWT.NONE);
gif.setLocation(10,10);
gif.setSize(16, 16);
gif.load(new java.io.ByteArrayInputStream(d));
shell.open();
gif.animate();
while (!shell.isDisposed ()) {
if (!display.readAndDispatch ()) display.sleep ();
}
display.dispose ();
}
private final ImageLoader loader = new ImageLoader();
private int img = 0;
private volatile boolean animating = false;
private Thread animateThread;
public AnimatedGif(Composite parent, int style) {
super(parent, style);
}
public void load (InputStream resource) throws IOException {
loader.load(resource);
}
public void animate() {
if (animateThread == null) {
animateThread = createThread();
animateThread.setDaemon(true);
}
if (animateThread.isAlive())
return;
animateThread.start();
}
public void stop() {
animating = false;
if (animateThread != null)
try {
animateThread.join();
animateThread = null;
} catch (InterruptedException e) {
// do nothing
}
}
private Thread createThread() {
return new Thread() {
long currentTime = System.currentTimeMillis();
final Display display = getParent().getDisplay();
public void run() {
animating = true;
while(animating) {
img = (img == loader.data.length-1) ? 0 : img + 1;
int delayTime = Math.max(50, 10*loader.data[img].delayTime);
long now = System.currentTimeMillis();
long ms = Math.max(currentTime + delayTime - now, 5);
currentTime += delayTime;
try {
Thread.sleep(ms);
} catch(Exception e) {
return;
}
if (!display.isDisposed())
display.asyncExec(new Runnable() {
@Override
public void run() {
ImageData nextFrameData = loader.data[img];
Image frameImage = new Image(display, nextFrameData);
new GC(AnimatedGif.this).drawImage(frameImage, nextFrameData.x, nextFrameData.y);
frameImage.dispose();
//canvas.redraw();
}
});
}
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
new GC(AnimatedGif.this).fillRectangle(
0,
0,
getBounds().width,
getBounds().height);
}
});
}
};
}
private static final char[] map1 = new char[64];
static {
int i = 0;
for (char c = 'A'; c <= 'Z'; c++)
map1[i++] = c;
for (char c = 'a'; c <= 'z'; c++)
map1[i++] = c;
for (char c = '0'; c <= '9'; c++)
map1[i++] = c;
map1[i++] = '+';
map1[i++] = '/';
}
private static final byte[] map2 = new byte[128];
static {
for (int i = 0; i < map2.length; i++)
map2[i] = -1;
for (int i = 0; i < 64; i++)
map2[map1[i]] = (byte) i;
}
public static byte[] decode(char[] in, int iOff, int iLen) {
if (iLen % 4 != 0)
throw new IllegalArgumentException(
"Length of Base64 encoded input string is not a multiple of 4.");
while (iLen > 0 && in[iOff + iLen - 1] == '=')
iLen--;
int oLen = (iLen * 3) / 4;
byte[] out = new byte[oLen];
int ip = iOff;
int iEnd = iOff + iLen;
int op = 0;
while (ip < iEnd) {
int i0 = in[ip++];
int i1 = in[ip++];
int i2 = ip < iEnd ? in[ip++] : 'A';
int i3 = ip < iEnd ? in[ip++] : 'A';
if (i0 > 127 || i1 > 127 || i2 > 127 || i3 > 127)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int b0 = map2[i0];
int b1 = map2[i1];
int b2 = map2[i2];
int b3 = map2[i3];
if (b0 < 0 || b1 < 0 || b2 < 0 || b3 < 0)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int o0 = (b0 << 2) | (b1 >>> 4);
int o1 = ((b1 & 0xf) << 4) | (b2 >>> 2);
int o2 = ((b2 & 3) << 6) | b3;
out[op++] = (byte) o0;
if (op < oLen)
out[op++] = (byte) o1;
if (op < oLen)
out[op++] = (byte) o2;
}
return out;
}
static String b64 =
"R0lGODlhEAAQAPf/AMra5KO9zOzz9s3b5MDS3aS+zZOwwsrZ4+bt89jm7IKftuLr" +
"8ZCswH6bs9vn7qrD0+Ts8oqnvYKetomlu8PV4t7p79Pg6dzp79vm7q/F1aC6zdTi" +
"6bbL2XqYsdDf55SwxK/H1ajC0Yajudzp7q7E087c59Xi6niWr8XY4qW/0LnM2aO9" +
"zoyov7bK2djk7ICbtIOhtt7p7pq1yc3b5sDU38DR39vm7LDH1afA0Yqovebu85Gt" +
"wouovsvZ5oCdtunv9IaiuXyZscrb5qnC0Zezxdjj64WiuZ65y7vQ3N/p8OLq8avE" +
"1MTX4o6rwJaxxLnN29jm7a7F1dvm7Y2qwH2bs7zQ3Jizx9Df6djj7HmXr+Tt8rvO" +
"26/E1LrO3KC5zKW+z8/e577Q3oCctYShubTK2HyasZGtwHuYsdTg6enw89/p76zE" +
"04iku+Lr8LXJ2JKvw+Xt8tHf583b57vP3pu2ycjY45m0yNvp7qzE1bHI13qZsaK8" +
"zn2aspq0yODp8Nfi6svb5oaiuuDp76vE09Hf6ZWzxMfY4qnC087e5+rx9eLs8pSv" +
"xL/R3rPI2Nfj64ajuq7E1Y2pv7rQ2+Lp8J65yarC1JKvxLzR3nybsrzQ3unw9HmY" +
"sLvR3LPI17nO28rY5Iqlvc3c5urx9Iakus/f53iVrtPi6cnY436btNTi6tfi67PK" +
"1rHI2H6bspy3yJ63ydXi7JezxKjA0ZGuwNjk64CbtazE0Zezx8DR3NXi68TX4Nvn" +
"7e7097vP24CctH2atIqlvN7o7tPh6Nzn7JGvwK/F1MbX4dHg6MfY4b7S3c/e5n2b" +
"srjN2ezy9eDq78rb4svb473R2+Ts8cHV3r7R3eXu8rfN2JSvwo+tv9/p7pSxw6C7" +
"ytbj6cTX36zD1NHg5vL3+KnB0Onx9cPV35y4yLPJ1dnl7LnO2cHU3avD0bnP2sDV" +
"4pKvwdfk7O7z99Xi6XqXsNTi6N3o7sjY4ubv8s7c5qvE0env842svuLs8Z64yJm0" +
"xZq2x46svuLq8P///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQFAAD/ACwAAAAAEAAQ" +
"AAAIvQD/CRR4poEEf0q0DFz478SZFwqAxPC3AAEChqU6GAQSwcHEihcFntAoQUQE" +
"BgkcVKCI4IdAkiYZfCiScmXFfzBzMFhYk6K/jScZ/uspSEFMJ0KxpIxxAQgPBk5k" +
"CDVRBIoDDE8/yNAg1MIGmglkyvDy5cHCEh4smIC14d/WLyGGQGpBA4UQtBYsCDyy" +
"AseDByBaqKhhqAfaKwNTPIiyBAQZFUjqGmY4KEMGEG5UdAlDgYlQgQ9IdOJQhRPD" +
"gAAh+QQFAAD/ACwAAAAAEAAQAAAI1gD/CRR4IkiZBjx0DFwoMIueBj58EBGlRAtD" +
"h1TEKAikgJUOfxAWbsKk0QgbFjmEJEkiEIFAVBLGjMoxZUcfG1AEhvz3YiOoSAz/" +
"3VHzD8EPIxNYMPgQVMqFSRAWsGjyhoidoAmkVFjgb8eHPq80BMWS4I4fQZboaFiB" +
"I6gJFxhGXDiyJ4WsIZUWItrwxwYGF/9ShHjwIEOLGkwAkbJgAgvgwHjwLLnB4QmF" +
"HnI8oNmwYSCJDCDycPBUA8UMMHGuMHxAQvSTTBTqzBASVCCIFlWQ1KDAMCAAIfkE" +
"BQAA/wAsAAAAABAAEAAACMsA/wkUeKZDmSANfAxc+K8BlRe+FNRSMIbKMoYwFCjI" +
"CAOGEY1iqgyMMIHkhJIRgE14JImXEoEMdpiJyYBBpBxN5mjyB+GfEytWiNyyspCR" +
"ohE8f2jQ4GUpw38JLvBE8CVFChxDnkadOmTQoRCQtEqFoKNYlAdrMohNyiVDlBst" +
"5ix0lAAphAX/uNwgoyIMDQpMdJWAgrSNQC6NODyZQ4NJnVAWCFeYNLDFli6MDM3w" +
"gCZXgl0XGFaRVAPFZjQmHEl5KhCaaQ+EKDAMCAAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"2wD/CRRIJUiHLCdODFz47xEMCT5eNOBjMAjDKTkisBFh5J+CiK3KDFy06I2ZJix4" +
"TJggYowCPr8EHqEjg4iTDzuaTGExQYEVQP9WCN3j5cjCKXYuIfihZEiIEDhwMBSo" +
"KMYCLSSiHAqxZKoNB0kW/MvAZQmJDFP/jfi3AMEaEDcacZgqZSACEjfIcOjCZKEL" +
"DGr+IdDxz42KJ0hQCEFkQVUCB34WEP6nYssWGqfkxDH1B8MIfwv8DUQShsKnEoQ2" +
"FMEQI4lohhR6gLFg4q+DCmn/zQBzZUMqGwwDAgAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"uAD/CRTIgAcQBQ06DFz474iMDwVFSEh4gmGKFRoeMogAZGKHUgMHPcDxxYsMJxsl" +
"UhSYIcqDEF8yosyh8sy/BxmWPBjyYGFKCS/OkABBFBLDfxsPNujkhkyLFkchcpTA" +
"QYVVGkeLJHAQw1+VLkhqoDi6tasSTmFoGBJSYuHWCv4WaPlHAUWPEh4smMDyNi4C" +
"gUzu5t2g1QHcBQj+CsSrtwgUw34VD7xiAVZZvz+OCtyQAMMFQf4YBgQAIfkEBQAA" +
"/wAsAAAAABAAEAAACNYA/wkUeMTSDhZGXgxc+C9Fij10PjSZoADVJoYk8IRIoaHP" +
"GxaBJGDKMvBBBjwPZK14RYQBqDFi9AwkAWLJgyE4NNj5EGmUAiokQeTJcyNDJYY5" +
"jMQ80YKD0xYM/01ho6BBkCpPPD2pEfVgIB9lkGSqQYFJ1D45FPhoQBZFD0BRbQhh" +
"RYQHhToz5JBCxBBKEh2idPybAcaDhQ0msCSQcudfEn9KBAqJg8bEHxeLL6gRCEHL" +
"wCsbsNjAcKfCpH8Qov7b4ALDCD8LICD4N1u1iwuC/C34wTAgACH5BAUAAP8ALAAA" +
"AAAQABAAAAjNAP8JFMil2JAvGpwMXPiPC5cMUQal0GCFQQSGLRrdiPLgUAovVnZM" +
"gDGwyhYOZG6sCYFDAxEzERQ0ECipyxMVLTJAGqLhFoMJCqj8o1CD0ZwwcxaGHPni" +
"DAoUhmjQYPjvpwJfHWZoZUKBKkwYCsp4GFunK8NIEWDUCkIIDZpQuqjmAGZEJgUT" +
"uSyUcLSQUROgY8b8c5QACpQEiBErmvMoqEApu0ZIvkD5wghNksQsG3ihgr/PoD/z" +
"qsJwUhsIqFNDUEJV4IIfOhAg+MEwIAAh+QQFAAD/ACwAAAAAEAAQAAAI1wD/CRRI" +
"Yk0GEkNWDFz4T4WbGyC4RAmx4sgihki2qCBzY8khinQWTRlIIcyWJxwakQiBY4+M" +
"NzkeCaRAgQaSLhwyLMHhhYiZCDD+zejx6RQKJguPOGnCRgIVMGBKyBHC8N8HFiJ8" +
"BLligVAcRFV38DDyosMGExtMWajaZMK/BllSuSjyR1XVKRMU8DlhA4PfBC4WTsGq" +
"9cQ/BzFGOMAg5Z+Nf3YmjCkrsEISf37U/FOs6JICBa2CDPS3YMHAJDEQWOFThqE/" +
"HQj+lV7wA9CvqgJhx9aihGFAACH5BAUAAP8ALAAAAAAQABAAAAjAAP8JFMipCodO" +
"JB4MXPiPCYUwXVS4AZEhwyCGJXqgoIFEBRkQS6I8SDHwioeMhmqoaAHiwQMcK44I" +
"tGDhpJCNLSANCfFFg4x/G2CZqFli4YMvXmR8YJAgQZENFhj+87mUBwYHUIqYkCrD" +
"CQMeQC7EcJAAi1SvEUQoEOSvAlmpDCIAkdDA34K2bwcyyPGPbod/CO66dbo0Ld2B" +
"PwLjTRBXhN8TAxEgEDhWrt9SDCXfjQFEwYszkKX+06LEH90zDAMCACH5BAkAAP8A" +
"LAAAAAAQABAAAAjWAP8JFEihBpIqLUAMXPhPyIw6FDI94ZCHxAOGV+KAmYGihieK" +
"IDKQGLhhAxoPcnpQmHhjCR48KQS6wGLCAilATGq0yPDgQYiYLjDY+LMB0cJKQ2Sl" +
"2HPkwggMLkww/IdjhQY6lgT5uZMAy1QNr/p82OFvQQUpCabaIfKmCYsFECZdkDL1" +
"AwMWE4z8QPBPzZ2pkUAFUvBCIASBUGz02TElx6gxElAJ5PsvSRIhOViwMaJADKZN" +
"CyH408FKwWAxVPRkYahFiSgiPnw0UD11oA4eDcoEOcEwIAAh+QQJAAD/ACwAAAAA" +
"EAAQAAAIyAD/CRw4zAYtYdwGKvyXz1m2YAaF1XsGbWE1aW3UQKQVb8CBA8YG3qMH" +
"Z4E8c++OKQMwD9k4XALTpClp7tsxD8oGIOs2DaamRDPbOFMIwBi6ZARwuQMKp83C" +
"fwSiVUEqgKnThdTU9YrWq5nVp73OMWNmzWvQp6tWlUv7j6m4AQpt2Utny1a4tmnc" +
"AQhAaVuAAAUABxaoCdwzfq70UaJEzpUrSo8HQjNQqNC+WLEKZda8cNYsIoWuFTLA" +
"bhaxpwK1EWOADV+/hQEBACH5BAUAAP8ALAAAAAAQABAAAAjcAP8JFGgj1YYrYGYM" +
"XPivggMMLkxYANODAsN//pLEwFBkA6ESnyiEQTLQ3wJ/IzD8MRVHzikaW7aoEKhj" +
"gR8HCVRZQCQEBZInKtz804HAnxqIC5l04UDmBgkEEPxdkHKRQ6MbINYgODkiwcUM" +
"JJZwybA1iQMbF5eEOBSFhJYFMRR5ZYgDR4gQQ5T8QHDJzpSFR7zsWUH4HyArCiaw" +
"mNJkxwcnRGTQOSLwFx8FY0RMmMCDRRMzbxYtGlim1QsfCmAYEcEmQo6/C4N0CMKn" +
"wWkJMB5dFHjiRJbZVBgGBAAh+QQFAAD/ACwAAAAAEAAQAAAIuQD/CRToT9AFDAk2" +
"DFz47weCBf5iOEgAy8IVhgge+qvgAEoRExY8lBiYESLHBEU2hCzRg4lAjSexgBTZ" +
"AwWFf1pMTlxYQoghGmE4KYm4kyGKGki6VCGKkuE/GiqicpAAJAKDD05btCDjplMD" +
"BVUZOIUEoiyJMy8kiLC68MGQB0syPPh3poHaHAycyNDwJcSDKBkEnuhgd21eGV6+" +
"4HgwaGApwlStfti7IgXDwYV5XJVxxKlAwmA1MwwIACH5BAUAAP8ALAAAAAAQABAA" +
"AAjVAP8JFPhjgT9BF1wMXPgPQUMIC/yMwOBiA0OBEP5NqnAHgw0sG64M1JLxn5oL" +
"UhK4+GMCTRwhApX4S/LvTkosJjZY8ABmxj8donQkgcIQESk5M+pQ4EGElRAbFwH1" +
"QEGhRgMfCnL0ucikaiYkZXwEYrHjYo0nnp5UCdJAAZspF1twmNvihB4xRnIwrJTh" +
"Rp48IP5loaJgVKQPdjTgGPJgCQgSA++OAcWAyKsVsh7gyfBgYBZMEsa+6aMhRQg8" +
"kBduQqVgQpMPdPakSHFR4AsjZC0dYRgQACH5BAUAAP8ALAAAAAAQABAAAAjRAP8J" +
"FFgBAQIdP2wMXPhPCYSHECG0GcaQBi9/GDNirNBuIA8xkjSNuEDywoiTuzYIpKLg" +
"0RxFCWLGhAIlgaN/D4womNCE0UJHJSzkMpHphQIjwFgw/KcrFBo0hCTUghHBy1JG" +
"dTx4CNVAAVUiS9cxmTGjRxlfO80spUHDEAoUE17AmLDDysIuYeYwqnHpH8sJDG5p" +
"CFGJSwsVT7pIEtg1ghkiGnCEWHODDIctWwbOreslxaEHUW40usEwAgMrGlIMipKB" +
"y5qlAp1o+DKExBeGAQEAIfkEBQAA/wAsAAAAABAAEAAACNcA/wkUqETLPwQIByr8" +
"xwPQjwUQD+rAsBATHysIYiQZCNHfwBWtFCi4pMjBiH9q/PhLkkBghxdjJtj5Z+Of" +
"FAwmYzj4xyaIDxEsmih0kQCDUSzw+CiYsGPhP1V/irhIlaXBvwmWnFowtcHElTIv" +
"jPD44FRIHEIWrvgESnahEDklwMygIoFNEydHFFJAcepTjzr/YEQwQ8QLjgfeOHRB" +
"QoMCBYFAcryRsQdHCBKNODzZEobDwCmL6KwIcWjJDTIqtiBZaObI6ChcQNxw48ap" +
"wBVDSGRYs2ZhQAA7";
}
Here is sample code. Basically this is how it works: unlike other image formats, an animated GIF has a set of ImageData objects (like frames in an animation). You render this image data on a Canvas with the delay that you want.
http://www.java2s.com/Code/Java/SWT-JFace-Eclipse/DisplayananimatedGIF.htm
This is also working example code, based upon the Eclipse article about SWT. I was trying to make a slideshow or animated GIF dialog from images located in a folder. Posted here because it could be useful for someone.
public class GifDialog extends Dialog {
Shell dialog;
private Canvas canvas;
int numberImage = 1;
private volatile boolean running = true;
final List<Image> imageCollection = new ArrayList<Image>();
GifDialog(Shell parent) {
super(parent);
}
public String open() {
Shell parent = getParent();
this.func();
dialog = new Shell(parent, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL);
dialog.setSize(600, 400);
dialog.setText("Show Begins!!!");
Monitor primary = dialog.getDisplay().getPrimaryMonitor();
Rectangle bounds = primary.getBounds();
Rectangle rect = dialog.getBounds();
int x = bounds.x + (bounds.width - rect.width) / 2;
int y = bounds.y + (bounds.height - rect.height) / 2;
dialog.setLocation(x, y);
dialog.setLayout(new FillLayout());
final Canvas canvas = new Canvas(dialog, SWT.NONE);
final Image image = new Image(dialog.getDisplay(), imageCollection.get(
0).getImageData());
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener() {
public void paintControl(PaintEvent event) {
event.gc.drawImage(image, 0, 0);
}
});
Thread thread = new Thread() {
public void run() {
while (running) {
dialog.getDisplay().asyncExec(new Runnable() {
public void run() {
numberImage = numberImage == imageCollection.size() - 1
? 0 : numberImage + 1;
ImageData nextFrameData = imageCollection.get(
numberImage).getImageData();
Image frameImage = new Image(dialog.getDisplay(),
nextFrameData);
gc.drawImage(frameImage, nextFrameData.x,
nextFrameData.y);
frameImage.dispose();
canvas.redraw();
if (numberImage == 0)
try {
running = false;
} catch (Exception e) {
e.printStackTrace();
}
try {
Thread.sleep(200);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
}
}
};
dialog.open();
thread.start();
Display display = parent.getDisplay();
while (!dialog.isDisposed()) {
if (!display.readAndDispatch())
display.sleep();
}
return "After Dialog";
}
public void func() {
File path = new File("..\\folder");
File[] files = path.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].isFile()) { // this line weeds out other
// directories/folders
try {
ImageData imageData = new ImageData(
new ByteArrayInputStream(loadImage(files[i])));
final Image image = new Image(Display.getDefault(),
imageData);
imageCollection.add(image);
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
public byte[] loadImage(File file) throws IOException {
BufferedImage image = ImageIO.read(file);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ImageIO.write(image, "jpg", bos);
return bos.toByteArray();
}
public Canvas getCanvas() {
return canvas;
}
public void setCanvas(Canvas canvas) {
this.canvas = canvas;
}
Note: None of the examples that rely on SWT's ImageLoader.class will work on GTK Linux SWT, as there is currently a bug that hard-codes the maximum frames to 32 and sets the delay between frames incorrectly.
See GTK ImageLoader.java
// Fix the number of GIF frames as GdkPixbufAnimation does not provide an API to
// determine number of frames.
int num_frames = 32;