Extract images from PDF using PDFBox library (Java)

I want only the images, at their respective positions and with the exact layout of the PDF, but without any text rendered. Currently I am working this way, but the text is also rendered. Is there any way to meet that requirement?
File sourceFile = new File(pdfFile);
String fileName = sourceFile.getName().replace(".pdf", "");
int pageNumber = 1;
// li is the page list, presumably obtained via doc.getDocumentCatalog().getAllPages() (PDFBox 1.8)
for (PDPage page : li)
{
    BufferedImage image = page.convertToImage();
    File outputfile = new File(imgDes + fileName + "_" + pageNumber + ".png");
    System.out.println("Image Created -> " + outputfile.getName());
    ImageIO.write(image, "png", outputfile);
    pageNumber++;
}

Derive a class from PageDrawer and override all methods that don't deal with images so that they do nothing, then call drawPage(). I only overrode processTextPosition() and didn't bother with lines, shapes, etc., but I think it is clear what I mean.
public class MyPageDrawer extends PageDrawer
{
public MyPageDrawer() throws IOException
{
}
@Override
protected void processTextPosition(TextPosition text)
{
}
// taken from PDPage.convertToImage, with extra parameter and one modification
static BufferedImage convertToImage(PDPage page, int imageType, int resolution) throws IOException
{
final Color TRANSPARENT_WHITE = new Color(255, 255, 255, 0);
final int DEFAULT_USER_SPACE_UNIT_DPI = 72;
PDRectangle cropBox = page.findCropBox();
float widthPt = cropBox.getWidth();
float heightPt = cropBox.getHeight();
float scaling = resolution / (float) DEFAULT_USER_SPACE_UNIT_DPI;
int widthPx = Math.round(widthPt * scaling);
int heightPx = Math.round(heightPt * scaling);
Dimension pageDimension = new Dimension((int) widthPt, (int) heightPt);
int rotationAngle = page.findRotation();
// normalize the rotation angle
if (rotationAngle < 0)
{
rotationAngle += 360;
}
else if (rotationAngle >= 360)
{
rotationAngle -= 360;
}
// swap width and height
BufferedImage retval;
if (rotationAngle == 90 || rotationAngle == 270)
{
retval = new BufferedImage(heightPx, widthPx, imageType);
}
else
{
retval = new BufferedImage(widthPx, heightPx, imageType);
}
Graphics2D graphics = (Graphics2D) retval.getGraphics();
graphics.setBackground(TRANSPARENT_WHITE);
graphics.clearRect(0, 0, retval.getWidth(), retval.getHeight());
if (rotationAngle != 0)
{
int translateX = 0;
int translateY = 0;
switch (rotationAngle)
{
case 90:
translateX = retval.getWidth();
break;
case 270:
translateY = retval.getHeight();
break;
case 180:
translateX = retval.getWidth();
translateY = retval.getHeight();
break;
default:
break;
}
graphics.translate(translateX, translateY);
graphics.rotate((float) Math.toRadians(rotationAngle));
}
graphics.scale(scaling, scaling);
PageDrawer drawer = new MyPageDrawer(); // MyPageDrawer instead of PageDrawer
drawer.drawPage(graphics, page, pageDimension);
drawer.dispose();
graphics.dispose();
return retval;
}
public static void main(String[] args) throws IOException
{
String filename = "......./blah.pdf";
// open the document
PDDocument doc = PDDocument.loadNonSeq(new File(filename), null);
List<PDPage> pages = doc.getDocumentCatalog().getAllPages();
for (int p = 0; p < pages.size(); ++p)
{
PDPage page = pages.get(p);
BufferedImage bim = convertToImage(page, BufferedImage.TYPE_INT_RGB, 300);
boolean b = ImageIOUtil.writeImage(bim, "page-" + (p + 1) + ".png", 300);
if (!b)
{
// error handling
}
}
doc.close();
}
}

Related

How to use YOLO v3 with Java to detect objects (cows to be specific) in pictures

I am working through some examples I found online to understand more about using YOLO in Java. I have this code, which I edited a little; it can detect objects in videos, but now I want to do the same with pictures and I am struggling with it. I would appreciate it if anyone could show me how to edit it, or has advice or a method to solve it.
The code:
class yolo {
private static List<String> getOutputNames(Net net) {
List<String> names = new ArrayList<>();
List<Integer> outLayers = net.getUnconnectedOutLayers().toList();
List<String> layersNames = net.getLayerNames();
outLayers.forEach((item) -> names.add(layersNames.get(item - 1)));//unfold and create R-CNN layers from the loaded YOLO model//
return names;
}
public static void main(String[] args) throws InterruptedException {
System.load("C:\\Users\\LENOVO\\Desktop\\Java1\\Yolo\\opencv\\build\\java\\x64\\opencv_java400.dll");
System.out.println("Library Loaded");
System.load("C:\\Users\\LENOVO\\Desktop\\Java1\\Yolo\\opencv\\build\\java\\x64\\opencv_java400.dll");
String modelWeights = "C:\\Users\\LENOVO\\Desktop\\Java1\\Yolo\\yolov3.weights";
String modelConfiguration = "C:\\Users\\LENOVO\\Desktop\\Java1\\Yolo\\yolov3.cfg.txt";
String filePath = "C:\\Users\\LENOVO\\Desktop\\cows.mp4";
VideoCapture cap = new VideoCapture(filePath);
Mat frame = new Mat();
Mat dst = new Mat ();
//cap.read(frame);
JFrame jframe = new JFrame("Video");
JLabel vidpanel = new JLabel();
jframe.setContentPane(vidpanel);
jframe.setSize(600, 600);
jframe.setVisible(true);
Net net = Dnn.readNetFromDarknet(modelConfiguration, modelWeights);
//Thread.sleep(5000);
//Mat image = Imgcodecs.imread("D:\\yolo-object-detection\\yolo-object-detection\\images\\soccer.jpg");
Size sz = new Size(288,288);
List<Mat> result = new ArrayList<>();
List<String> outBlobNames = getOutputNames(net);
while (true) {
if (cap.read(frame)) {
Mat blob = Dnn.blobFromImage(frame, 0.00392, sz, new Scalar(0), true, false);
net.setInput(blob);
net.forward(result, outBlobNames);
// outBlobNames.forEach(System.out::println);
// result.forEach(System.out::println);
float confThreshold = 0.6f;
List<Integer> clsIds = new ArrayList<>();
List<Float> confs = new ArrayList<>();
List<Rect> rects = new ArrayList<>();
for (int i = 0; i < result.size(); ++i)
{
Mat level = result.get(i);
for (int j = 0; j < level.rows(); ++j)
{
Mat row = level.row(j);
Mat scores = row.colRange(5, level.cols());
Core.MinMaxLocResult mm = Core.minMaxLoc(scores);
float confidence = (float)mm.maxVal;
Point classIdPoint = mm.maxLoc;
if (confidence > confThreshold)
{
int centerX = (int)(row.get(0,0)[0] * frame.cols());
int centerY = (int)(row.get(0,1)[0] * frame.rows());
int width = (int)(row.get(0,2)[0] * frame.cols());
int height = (int)(row.get(0,3)[0] * frame.rows());
int left = centerX - width / 2;
int top = centerY - height / 2;
clsIds.add((int)classIdPoint.x);
confs.add((float)confidence);
rects.add(new Rect(left, top, width, height));
}
}
}
float nmsThresh = 0.5f;
MatOfFloat confidences = new MatOfFloat(Converters.vector_float_to_Mat(confs));
Rect[] boxesArray = rects.toArray(new Rect[0]);
MatOfRect boxes = new MatOfRect(boxesArray);
MatOfInt indices = new MatOfInt();
Dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThresh, indices);
int [] ind = indices.toArray();
int j=0;
for (int i = 0; i < ind.length; ++i)
{
int idx = ind[i];
Rect box = boxesArray[idx];
Imgproc.rectangle(frame, box.tl(), box.br(), new Scalar(0,0,255), 2);
//i=j;
System.out.println(idx);
}
// Imgcodecs.imwrite("D://out.png", image);
//System.out.println("Image Loaded");
ImageIcon image = new ImageIcon(Mat2bufferedImage(frame));
vidpanel.setIcon(image);
vidpanel.repaint();
// System.out.println(j);
//System.out.println("Done");
}
}
}
// }
private static BufferedImage Mat2bufferedImage(Mat image) {
MatOfByte bytemat = new MatOfByte();
Imgcodecs.imencode(".jpg", image, bytemat);
byte[] bytes = bytemat.toArray();
InputStream in = new ByteArrayInputStream(bytes);
BufferedImage img = null;
try {
img = ImageIO.read(in);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return img;
}
}
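For running the same network on a single picture instead of a video, a minimal sketch is below. It assumes the same OpenCV Java bindings as the question; the file paths and the 416x416 input size are illustrative assumptions, and NMS is omitted for brevity (the NMSBoxes block from the question can be reused unchanged).
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.*;
import org.opencv.dnn.Dnn;
import org.opencv.dnn.Net;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
class YoloImage {
    // Same helper as in the question: resolve the names of the unconnected output layers.
    private static List<String> getOutputNames(Net net) {
        List<String> names = new ArrayList<>();
        List<Integer> outLayers = net.getUnconnectedOutLayers().toList();
        List<String> layerNames = net.getLayerNames();
        outLayers.forEach(item -> names.add(layerNames.get(item - 1)));
        return names;
    }
    public static void main(String[] args) {
        System.load("C:\\path\\to\\opencv_java400.dll");                  // adjust to your OpenCV build
        Net net = Dnn.readNetFromDarknet("yolov3.cfg", "yolov3.weights"); // adjust paths
        Mat frame = Imgcodecs.imread("cows.jpg");                         // a single picture instead of VideoCapture
        // Same preprocessing as in the video version, just done once.
        Mat blob = Dnn.blobFromImage(frame, 0.00392, new Size(416, 416), new Scalar(0), true, false);
        net.setInput(blob);
        List<Mat> result = new ArrayList<>();
        net.forward(result, getOutputNames(net));
        // Same post-processing loop as in the question, drawing every box above the threshold.
        float confThreshold = 0.6f;
        for (Mat level : result) {
            for (int j = 0; j < level.rows(); ++j) {
                Mat row = level.row(j);
                Mat scores = row.colRange(5, level.cols());
                Core.MinMaxLocResult mm = Core.minMaxLoc(scores);
                if ((float) mm.maxVal > confThreshold) {
                    int centerX = (int) (row.get(0, 0)[0] * frame.cols());
                    int centerY = (int) (row.get(0, 1)[0] * frame.rows());
                    int width   = (int) (row.get(0, 2)[0] * frame.cols());
                    int height  = (int) (row.get(0, 3)[0] * frame.rows());
                    Imgproc.rectangle(frame,
                            new Point(centerX - width / 2, centerY - height / 2),
                            new Point(centerX + width / 2, centerY + height / 2),
                            new Scalar(0, 0, 255), 2);
                }
            }
        }
        Imgcodecs.imwrite("cows-out.jpg", frame);                         // save the result instead of showing it in a JFrame
    }
}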

Get displayed size of an image in a PDF with PDFBox

I am trying to compute the percentage of each page that the images in a PDF occupy. I have the following code:
PDPageTree list = document.getPages();
int pageNumber = 0;
float imagePerPage = 0;
for (PDPage page : list) {
BufferedImage pageImage = renderer.renderImage(pageNumber, 2);
float pageWidth = pageImage.getWidth();
float pageHeight = pageImage.getHeight();
PDResources pdResources = page.getResources();
int i = 1;
for (COSName name : pdResources.getXObjectNames()) {
PDXObject object = pdResources.getXObject(name);
if (object instanceof PDImageXObject) {
PDImageXObject image = (PDImageXObject) object;
BufferedImage bufferedImage = image.getImage();
float imageWidth = bufferedImage.getWidth();
float imageHeight = bufferedImage.getHeight();
int sumr = 0;
int sumg = 0;
int sumb = 0;
for (int x = 0; x < imageWidth; x++) {
for (int y = 0; y < imageHeight; y++) {
Color pixel = new Color(bufferedImage.getRGB(x, y));
sumr += pixel.getRed();
sumg += pixel.getGreen();
sumb += pixel.getBlue();
}
}
int num = image.getWidth() * image.getHeight();
Color avg = new Color(sumr / num, sumg / num, sumb / num);
if (!new Color(0, 0, 0).equals(avg)) {
String filename = "extracted-image-" + i + ".png";
ImageIO.write(
image.getImage(),
"png",
new File(filename)
);
imagePerPage++;
}
i++;
}
}
System.out.println("Image per page ratio is: " + imagePerPage);
imagePerPage = 0;
pageNumber++;
}
However, bufferedImage.getWidth() and bufferedImage.getHeight() return the actual size of the image in pixels. How could I get the displayed size for each image?
UPDATE 1
I have tried using the PrintImageLocations.java example to retrieve displayed image sizes. However, for an actual PDF it seems to give wrong results.
For this PDF, whose media box is 612 points wide and 792 points high, the reported scaled sizes of 25 (width) and 16.61 (height) for the images do not seem correct; after all, each image takes up at least a third of the total width.
I did something useful for me using the example present on the PDFBox documentation site:
package br.gov.pb.mp.framework.util.pdf;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.pdfbox.contentstream.PDFStreamEngine;
import org.apache.pdfbox.contentstream.operator.DrawObject;
import org.apache.pdfbox.contentstream.operator.Operator;
import org.apache.pdfbox.contentstream.operator.state.Concatenate;
import org.apache.pdfbox.contentstream.operator.state.Restore;
import org.apache.pdfbox.contentstream.operator.state.Save;
import org.apache.pdfbox.contentstream.operator.state.SetGraphicsStateParameters;
import org.apache.pdfbox.contentstream.operator.state.SetMatrix;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.graphics.PDXObject;
import org.apache.pdfbox.pdmodel.graphics.form.PDFormXObject;
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
import org.apache.pdfbox.util.Matrix;
public class ImageSizeExtractor extends PDFStreamEngine {
private Map<String, float[]> pageImages;
public ImageSizeExtractor() throws IOException {
// preparing PDFStreamEngine
addOperator(new Concatenate());
addOperator(new DrawObject());
addOperator(new SetGraphicsStateParameters());
addOperator(new Save());
addOperator(new Restore());
addOperator(new SetMatrix());
pageImages = new HashMap<String, float[]>();
}
public Map<String, float[]> getPageImages() {
return pageImages;
}
public void setPageImages(Map<String, float[]> pageImages) {
this.pageImages = pageImages;
}
@Override
protected void processOperator(Operator operator, List<COSBase> operands) throws IOException {
String operation = operator.getName();
if ("Do".equals(operation)) {
COSName objectName = (COSName) operands.get(0);
// get the PDF object
PDXObject xobject = getResources().getXObject(objectName);
// check if the object is an image object
if (xobject instanceof PDImageXObject) {
PDImageXObject image = (PDImageXObject) xobject;
int imageWidth = image.getWidth();
int imageHeight = image.getHeight();
System.out.println("\nImage [" + objectName.getName() + "]");
Matrix ctmNew = getGraphicsState().getCurrentTransformationMatrix();
float imageXScale = ctmNew.getScalingFactorX();
float imageYScale = ctmNew.getScalingFactorY();
System.out.println("displayed size = " + imageXScale + ", " + imageYScale + " in user space units");
float[] xy = {imageXScale,imageYScale};
pageImages.put(objectName.getName(), xy);
} else if (xobject instanceof PDFormXObject) {
PDFormXObject form = (PDFormXObject) xobject;
showForm(form);
}
} else {
super.processOperator(operator, operands);
}
}
}
A method that makes use of the above class:
public static boolean analyseImageEntirePagePdfAto(byte[] sourcePdf) throws Throwable {
boolean containsEntirePageImage = false;
PDDocument docAto = PDDocument.load(sourcePdf);
int p = 0;
PDPageTree pageTree = docAto.getPages();
if (!containsEntirePageImage) {
for (PDPage pagina : pageTree) {
p++;
PDFTextStripper reader = new PDFTextStripper();
reader.setStartPage(p);
reader.setEndPage(p);
String pageText = reader.getText(docAto);
pageText = pageText.replaceAll("\r\n", "");
if (pageText == "" || pageText == null) {
containsEntirePageImage = true;
break;
}
float ph = pagina.getMediaBox().getHeight();
float pw = pagina.getMediaBox().getWidth();
float pageArea = ph * pw;
ImageSizeExtractor imageSizeExtractor = new ImageSizeExtractor();
imageSizeExtractor.processPage(pagina);
if (!imageSizeExtractor.getPageImages().entrySet().isEmpty()) {
for (Map.Entry<String, float[]> entry : imageSizeExtractor.getPageImages().entrySet()) {
float[] imageMeasures = entry.getValue();
float imageArea = imageMeasures[0] * imageMeasures[1];
float imgPercent = (imageArea / pageArea) * 100;
if (imgPercent > 80) {
containsEntirePageImage = true;
break;
}
}
}
}
}
return containsEntirePageImage;
}
The processPage(PDPage page) method used above is inherited from the PDFStreamEngine class.

Improving accuracy of face recognition with JavaCV

I'm new to JavaCV. I'm trying to create a face recognition system and ran into a problem of low recognition accuracy. I use EigenFaceRecognizer for identification and the Georgia Tech Face Database as the photo database. All my photos have 640x480 resolution (I have 12 photos per person and 20 people in total).
The result is very unstable and sometimes it doesn't work at all. How can I increase the recognition accuracy? Should I do any extra editing of the photos, or is the problem in my code?
Train method
public void train() {
File imagesDir = new File("C:\\Java_Eclipse\\FaceRecognitionWebcam\\src\\image");
FilenameFilter imgFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
name = name.toLowerCase();
return name.endsWith(".jpg") || name.endsWith(".pgm") || name.endsWith(".png");
}
};
File[] imageFiles = imagesDir.listFiles(imgFilter);
MatVector images = new MatVector(imageFiles.length);
Mat labels = new Mat(imageFiles.length, 1, CV_32SC1);
IntBuffer labelsBuf = labels.createBuffer();
int counter = 0;
for (File image : imageFiles) {
Mat img = imread(image.getAbsolutePath(), CV_LOAD_IMAGE_GRAYSCALE);
int label = Integer.parseInt(image.getName().split("\\-")[0]);
images.put(counter, img);
labelsBuf.put(counter, label);
counter++;
}
// FaceRecognizer faceRecognizer = FisherFaceRecognizer.create();
FaceRecognizer faceRecognizer = EigenFaceRecognizer.create();
// FaceRecognizer faceRecognizer = LBPHFaceRecognizer.create();
System.out.println("Train started");
faceRecognizer.train(images, labels);
faceRecognizer.save("C:\\Java_Eclipse\\FaceRecognitionWebcam\\src\\train_result_eigen.xml");
System.out.println("Train completed");
}
Recognition method
public void findFaces(IplImage currentFrame) throws InterruptedException {
IntPointer labels = new IntPointer(1);
DoublePointer confidences = new DoublePointer(1);
opencv_core.CvMemStorage storage = new opencv_core.CvMemStorage().create();
opencv_core.CvSeq faces =
cvHaarDetectObjects(currentFrame, classifierFace, storage, 1.6, 8, opencv_objdetect.CV_HAAR_DO_CANNY_PRUNING);
int total = faces.total();
if(total > 0) {
System.out.println("Total faces: " + total);
for(int i = 0; i < total; i++) {
opencv_core.CvRect r = new opencv_core.CvRect(cvGetSeqElem(faces, i));
int x = r.x(); int y = r.y(); int w = r.width(); int h = r.height();
IplImage greyImg = IplImage.create(currentFrame.width(), currentFrame.height(), IPL_DEPTH_8U, 1);
cvCvtColor(currentFrame, greyImg, CV_RGB2GRAY);//resize
rectangle(cvarrToMat(currentFrame), new Rect(x, y, w, h), new Scalar(0, 255, 0, 0), 2, 0, 0);
faceRecognizer.predict(cvarrToMat(greyImg), labels, confidences);
int label = labels.get(0);
double confidence = confidences.get(0);
String labelInfo = faceRecognizer.getLabelInfo(label).toString();
System.out.println("---------");
System.out.println("Person: " + i);
System.out.println("label = " + label);
System.out.println("confidence = " + confidence);
}
}
}
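One likely accuracy problem in the code above, offered as a hedged note: faceRecognizer.predict() is called on the whole greyscale frame, while Eigenfaces needs a cropped face region scaled to the same fixed size as the training images. Below is a minimal sketch of that missing preprocessing, assuming the JavaCV (bytedeco) classes already used in the question; the 92x112 target size is a hypothetical value, use whatever size your training images actually have.
import static org.bytedeco.javacpp.opencv_imgproc.resize;
import org.bytedeco.javacpp.opencv_core.Mat;
import org.bytedeco.javacpp.opencv_core.Rect;
import org.bytedeco.javacpp.opencv_core.Size;
public class FacePreprocessor {
    // Crop the detected face rectangle out of the greyscale frame and scale it
    // to the size the recognizer was trained on, so predict() sees comparable input.
    public static Mat cropAndResize(Mat greyFrame, int x, int y, int w, int h) {
        Mat face = new Mat(greyFrame, new Rect(x, y, w, h)); // ROI view of the face region only
        Mat resized = new Mat();
        resize(face, resized, new Size(92, 112));            // hypothetical training size
        return resized;
    }
}
Inside the detection loop this would be used as faceRecognizer.predict(FacePreprocessor.cropAndResize(cvarrToMat(greyImg), x, y, w, h), labels, confidences), and the training images should be cropped and resized the same way before calling train().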

How to use the 'svm_toy' Applet example in LibSVM?

I'm using LIBSVM. The download package contains an svm_toy.java file, and I could not figure out how it works. Here is the source code:
import libsvm.*;
import java.applet.*;
import java.awt.*;
import java.util.*;
import java.awt.event.*;
import java.io.*;
/**
* SVM package
* @author unknown
*
*/
public class svm_toy extends Applet {
static final String DEFAULT_PARAM="-t 2 -c 100";
int XLEN;
int YLEN;
// off-screen buffer
Image buffer;
Graphics buffer_gc;
// pre-allocated colors
final static Color colors[] =
{
new Color(0,0,0),
new Color(0,120,120),
new Color(120,120,0),
new Color(120,0,120),
new Color(0,200,200),
new Color(200,200,0),
new Color(200,0,200)
};
class point {
point(double x, double y, byte value)
{
this.x = x;
this.y = y;
this.value = value;
}
double x, y;
byte value;
}
Vector<point> point_list = new Vector<point>();
byte current_value = 1;
public void init()
{
setSize(getSize());
final Button button_change = new Button("Change");
Button button_run = new Button("Run");
Button button_clear = new Button("Clear");
Button button_save = new Button("Save");
Button button_load = new Button("Load");
final TextField input_line = new TextField(DEFAULT_PARAM);
BorderLayout layout = new BorderLayout();
this.setLayout(layout);
Panel p = new Panel();
GridBagLayout gridbag = new GridBagLayout();
p.setLayout(gridbag);
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.HORIZONTAL;
c.weightx = 1;
c.gridwidth = 1;
gridbag.setConstraints(button_change,c);
gridbag.setConstraints(button_run,c);
gridbag.setConstraints(button_clear,c);
gridbag.setConstraints(button_save,c);
gridbag.setConstraints(button_load,c);
c.weightx = 5;
c.gridwidth = 5;
gridbag.setConstraints(input_line,c);
button_change.setBackground(colors[current_value]);
p.add(button_change);
p.add(button_run);
p.add(button_clear);
p.add(button_save);
p.add(button_load);
p.add(input_line);
this.add(p,BorderLayout.SOUTH);
button_change.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_change_clicked(); button_change.setBackground(colors[current_value]); }});
button_run.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_run_clicked(input_line.getText()); }});
button_clear.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_clear_clicked(); }});
button_save.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_save_clicked(input_line.getText()); }});
button_load.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_load_clicked(); }});
input_line.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_run_clicked(input_line.getText()); }});
this.enableEvents(AWTEvent.MOUSE_EVENT_MASK);
}
void draw_point(point p)
{
Color c = colors[p.value+3];
Graphics window_gc = getGraphics();
buffer_gc.setColor(c);
buffer_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
window_gc.setColor(c);
window_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
}
void clear_all()
{
point_list.removeAllElements();
if(buffer != null)
{
buffer_gc.setColor(colors[0]);
buffer_gc.fillRect(0,0,XLEN,YLEN);
}
repaint();
}
void draw_all_points()
{
int n = point_list.size();
for(int i=0;i<n;i++)
draw_point(point_list.elementAt(i));
}
void button_change_clicked()
{
++current_value;
if(current_value > 3) current_value = 1;
}
private static double atof(String s)
{
return Double.valueOf(s).doubleValue();
}
private static int atoi(String s)
{
return Integer.parseInt(s);
}
void button_run_clicked(String args)
{
// guard
if(point_list.isEmpty()) return;
svm_parameter param = new svm_parameter();
// default values
param.svm_type = svm_parameter.C_SVC;
param.kernel_type = svm_parameter.RBF;
param.degree = 3;
param.gamma = 0;
param.coef0 = 0;
param.nu = 0.5;
param.cache_size = 40;
param.C = 1;
param.eps = 1e-3;
param.p = 0.1;
param.shrinking = 1;
param.probability = 0;
param.nr_weight = 0;
param.weight_label = new int[0];
param.weight = new double[0];
// parse options
StringTokenizer st = new StringTokenizer(args);
String[] argv = new String[st.countTokens()];
for(int i=0;i<argv.length;i++)
argv[i] = st.nextToken();
for(int i=0;i<argv.length;i++)
{
if(argv[i].charAt(0) != '-') break;
if(++i>=argv.length)
{
System.err.print("unknown option\n");
break;
}
switch(argv[i-1].charAt(1))
{
case 's':
param.svm_type = atoi(argv[i]);
break;
case 't':
param.kernel_type = atoi(argv[i]);
break;
case 'd':
param.degree = atoi(argv[i]);
break;
case 'g':
param.gamma = atof(argv[i]);
break;
case 'r':
param.coef0 = atof(argv[i]);
break;
case 'n':
param.nu = atof(argv[i]);
break;
case 'm':
param.cache_size = atof(argv[i]);
break;
case 'c':
param.C = atof(argv[i]);
break;
case 'e':
param.eps = atof(argv[i]);
break;
case 'p':
param.p = atof(argv[i]);
break;
case 'h':
param.shrinking = atoi(argv[i]);
break;
case 'b':
param.probability = atoi(argv[i]);
break;
case 'w':
++param.nr_weight;
{
int[] old = param.weight_label;
param.weight_label = new int[param.nr_weight];
System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
}
{
double[] old = param.weight;
param.weight = new double[param.nr_weight];
System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
}
param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
param.weight[param.nr_weight-1] = atof(argv[i]);
break;
default:
System.err.print("unknown option\n");
}
}
// build problem
svm_problem prob = new svm_problem();
prob.l = point_list.size();
prob.y = new double[prob.l];
if(param.kernel_type == svm_parameter.PRECOMPUTED)
{
}
else if(param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR)
{
if(param.gamma == 0) param.gamma = 1;
prob.x = new svm_node[prob.l][1];
for(int i=0;i<prob.l;i++)
{
point p = point_list.elementAt(i);
prob.x[i][0] = new svm_node();
prob.x[i][0].index = 1;
prob.x[i][0].value = p.x;
prob.y[i] = p.y;
}
// build model & classify
svm_model model = svm.svm_train(prob, param);
svm_node[] x = new svm_node[1];
x[0] = new svm_node();
x[0].index = 1;
int[] j = new int[XLEN];
Graphics window_gc = getGraphics();
for (int i = 0; i < XLEN; i++)
{
x[0].value = (double) i / XLEN;
j[i] = (int)(YLEN*svm.svm_predict(model, x));
}
buffer_gc.setColor(colors[0]);
buffer_gc.drawLine(0,0,0,YLEN-1);
window_gc.setColor(colors[0]);
window_gc.drawLine(0,0,0,YLEN-1);
int p = (int)(param.p * YLEN);
for(int i=1;i<XLEN;i++)
{
buffer_gc.setColor(colors[0]);
buffer_gc.drawLine(i,0,i,YLEN-1);
window_gc.setColor(colors[0]);
window_gc.drawLine(i,0,i,YLEN-1);
buffer_gc.setColor(colors[5]);
window_gc.setColor(colors[5]);
buffer_gc.drawLine(i-1,j[i-1],i,j[i]);
window_gc.drawLine(i-1,j[i-1],i,j[i]);
if(param.svm_type == svm_parameter.EPSILON_SVR)
{
buffer_gc.setColor(colors[2]);
window_gc.setColor(colors[2]);
buffer_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);
window_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);
buffer_gc.setColor(colors[2]);
window_gc.setColor(colors[2]);
buffer_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
window_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
}
}
}
else
{
if(param.gamma == 0) param.gamma = 0.5;
prob.x = new svm_node [prob.l][2];
for(int i=0;i<prob.l;i++)
{
point p = point_list.elementAt(i);
prob.x[i][0] = new svm_node();
prob.x[i][0].index = 1;
prob.x[i][0].value = p.x;
prob.x[i][1] = new svm_node();
prob.x[i][1].index = 2;
prob.x[i][1].value = p.y;
prob.y[i] = p.value;
}
// build model & classify
svm_model model = svm.svm_train(prob, param);
svm_node[] x = new svm_node[2];
x[0] = new svm_node();
x[1] = new svm_node();
x[0].index = 1;
x[1].index = 2;
Graphics window_gc = getGraphics();
for (int i = 0; i < XLEN; i++)
for (int j = 0; j < YLEN ; j++) {
x[0].value = (double) i / XLEN;
x[1].value = (double) j / YLEN;
double d = svm.svm_predict(model, x);
if (param.svm_type == svm_parameter.ONE_CLASS && d<0) d=2;
buffer_gc.setColor(colors[(int)d]);
window_gc.setColor(colors[(int)d]);
buffer_gc.drawLine(i,j,i,j);
window_gc.drawLine(i,j,i,j);
}
}
draw_all_points();
}
void button_clear_clicked()
{
clear_all();
}
void button_save_clicked(String args)
{
FileDialog dialog = new FileDialog(new Frame(),"Save",FileDialog.SAVE);
dialog.setVisible(true);
String filename = dialog.getDirectory() + dialog.getFile();
if (filename == null) return;
try {
DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(filename)));
int svm_type = svm_parameter.C_SVC;
int svm_type_idx = args.indexOf("-s ");
if(svm_type_idx != -1)
{
StringTokenizer svm_str_st = new StringTokenizer(args.substring(svm_type_idx+2).trim());
svm_type = atoi(svm_str_st.nextToken());
}
int n = point_list.size();
if(svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
{
for(int i=0;i<n;i++)
{
point p = point_list.elementAt(i);
fp.writeBytes(p.y+" 1:"+p.x+"\n");
}
}
else
{
for(int i=0;i<n;i++)
{
point p = point_list.elementAt(i);
fp.writeBytes(p.value+" 1:"+p.x+" 2:"+p.y+"\n");
}
}
fp.close();
} catch (IOException e) { System.err.print(e); }
}
void button_load_clicked()
{
FileDialog dialog = new FileDialog(new Frame(),"Load",FileDialog.LOAD);
dialog.setVisible(true);
String filename = dialog.getDirectory() + dialog.getFile();
if (filename == null) return;
clear_all();
try {
BufferedReader fp = new BufferedReader(new FileReader(filename));
String line;
while((line = fp.readLine()) != null)
{
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
if(st.countTokens() == 5)
{
byte value = (byte)atoi(st.nextToken());
st.nextToken();
double x = atof(st.nextToken());
st.nextToken();
double y = atof(st.nextToken());
point_list.addElement(new point(x,y,value));
}
else if(st.countTokens() == 3)
{
double y = atof(st.nextToken());
st.nextToken();
double x = atof(st.nextToken());
point_list.addElement(new point(x,y,current_value));
}else
break;
}
fp.close();
} catch (IOException e) { System.err.print(e); }
draw_all_points();
}
protected void processMouseEvent(MouseEvent e)
{
if(e.getID() == MouseEvent.MOUSE_PRESSED)
{
if(e.getX() >= XLEN || e.getY() >= YLEN) return;
point p = new point((double)e.getX()/XLEN,
(double)e.getY()/YLEN,
current_value);
point_list.addElement(p);
draw_point(p);
}
}
public void paint(Graphics g)
{
// create buffer first time
if(buffer == null) {
buffer = this.createImage(XLEN,YLEN);
buffer_gc = buffer.getGraphics();
buffer_gc.setColor(colors[0]);
buffer_gc.fillRect(0,0,XLEN,YLEN);
}
g.drawImage(buffer,0,0,this);
}
public Dimension getPreferredSize() { return new Dimension(XLEN,YLEN+50); }
public void setSize(Dimension d) { setSize(d.width,d.height); }
public void setSize(int w,int h) {
super.setSize(w,h);
XLEN = w;
YLEN = h-50;
clear_all();
}
public static void main(String[] argv)
{
new AppletFrame("svm_toy",new svm_toy(),500,500+50);
}
}
class AppletFrame extends Frame {
AppletFrame(String title, Applet applet, int width, int height)
{
super(title);
this.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
System.exit(0);
}
});
applet.init();
applet.setSize(width,height);
applet.start();
this.add(applet);
this.pack();
this.setVisible(true);
}
}
Could someone give me an example or explanation? I also would like to scale my training data. Where is the right place to scale?
Thanks
SVM-Toy
SVM Toy is - as the name suggests - a simple toy built by the LIBSVM dev team and is not recommended for "productive" visualization of an SVM's decision boundary.
Moreover, looking into the source code of svm_toy it becomes clear that this tool only supports 2D vectors.
The relevant code fragment is taken from the button_load_clicked() method:
while ((line = fp.readLine()) != null) {
StringTokenizer st = new StringTokenizer(line, " \t\n\r\f:");
if (st.countTokens() == 5) {
byte value = (byte) atoi(st.nextToken());
st.nextToken();
double x = atof(st.nextToken());
st.nextToken();
double y = atof(st.nextToken());
point_list.addElement(new point(x, y, value));
} else if (st.countTokens() == 3) {
double y = atof(st.nextToken());
st.nextToken();
double x = atof(st.nextToken());
point_list.addElement(new point(x, y, current_value));
} else {
break;
}
}
As you can see, the svm_toy implementation can only handle 2D vectors, i.e. vectors constructed from exactly two features.
That means you can only read and display files that are built from two features, like for example the fourclass dataset provided by the LIBSVM authors. However, it seems that even this is not fully supported by this implementation.
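For reference, the save/load routines above read and write the plain LIBSVM text format with exactly two features. A tiny hand-made classification example (the numbers are made up; the coordinates are the 0..1 screen-relative positions svm_toy uses, and the leading label is the class/color index 1..3):
1 1:0.25 2:0.75
2 1:0.60 2:0.40
3 1:0.80 2:0.15
For the regression case (EPSILON_SVR/NU_SVR) the save routine writes only one feature per line, e.g. 0.75 1:0.25, which matches the three-token branch of the load code.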
I think the tool is designed for interactive visualization: you can change the color and click on the black application area; after you have set some points (each color representing its own class), you click "Run" and the decision boundary is displayed.
Displaying the decision boundary in a high-dimensional vector space is close to impossible anyway. I would recommend not using this tool for any productive or scientific purpose.
Scaling
Scaling of your training data should be done after you have transformed it into its numeric representation and before you go on to train your SVM with this data.
In short, that means you have to do the following steps before calling svm_train:
Construct the numeric representation for each data point (with the help of feature selection, ...)
Analyse the resulting numeric representation for each data point
Scale your data, for example to [-1, 1] (a minimal sketch of this step follows below)
Go ahead and train your SVM model. Note that you have to repeat steps 1-3 for predicting unknown data points as well. The only difference is that you already know the necessary features, so there is no need for feature selection.
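A minimal sketch of step 3 in plain Java, with no LIBSVM dependency: min-max scaling of each feature column to [-1, 1]. The data layout (data[i][j] = feature j of data point i) is an assumption, and the per-feature min/max must be stored and reused when scaling later prediction data.
public class MinMaxScaler {
    // Scales every feature column of data in place to the range [-1, 1].
    public static void scaleToMinusOneOne(double[][] data) {
        int numFeatures = data[0].length;
        for (int j = 0; j < numFeatures; j++) {
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;
            for (double[] row : data) {
                min = Math.min(min, row[j]);
                max = Math.max(max, row[j]);
            }
            if (max == min) continue; // constant feature, nothing to scale
            for (double[] row : data) {
                row[j] = 2 * (row[j] - min) / (max - min) - 1;
            }
        }
    }
}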

JAVA SWT Animated GIF

I need to add an animated GIF, in the form of a spinner, to some SWT widget such as a Label. By default this Label will be disabled (labelSpinner.setEnabled(false)). When I start some long-running operation, the Label with the spinner will be shown (labelSpinner.setEnabled(true)) in the right corner of the window.
Normally an Image can be added to a Label with labelSpinner.setImage(arg0).
If I add the SPINNER.GIF to the Label the normal way, it won't animate; it is only a static image.
Does anybody know how to add an animated GIF (for example some spinner) to a widget in an SWT-based Java SE app? I browsed many examples, but many of them were useless and the good ones were too complicated.
I would like to keep it very simple.
Can somebody help?
Did you try the Eclipse article about SWT Images?
This part will load the image and display it in a Canvas:
ImageLoader loader = new ImageLoader();
loader.load(getClass().getResourceAsStream("Idea_SWT_Animation.gif"));
Canvas canvas = new Canvas(shell,SWT.NONE);
image = new Image(display,loader.data[0]);
int imageNumber;
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener(){
public void paintControl(PaintEvent event){
event.gc.drawImage(image,0,0);
}
});
And this part updates the gif:
Thread thread = new Thread(){
public void run(){
long currentTime = System.currentTimeMillis();
int delayTime = loader.data[imageNumber].delayTime;
while(currentTime + delayTime * 10 > System.currentTimeMillis()){
// Wait till the delay time has passed
}
display.asyncExec(new Runnable(){
public void run(){
// Increase the variable holding the frame number
imageNumber = imageNumber == loader.data.length-1 ? 0 : imageNumber+1;
// Draw the new data onto the image
ImageData nextFrameData = loader.data[imageNumber];
Image frameImage = new Image(display,nextFrameData);
gc.drawImage(frameImage,nextFrameData.x,nextFrameData.y);
frameImage.dispose();
canvas.redraw();
}
});
}
};
shell.open();
thread.start();
After trying at least 3 different animated GIF examples, none of which worked, I started working on my own, based mainly on the answer above.
Here is a complete running example including:
a base64 decoder (courtesy of http://www.source-code.biz/base64coder/java/ )
spinner GIF
main method
Remove the main method, base64 methods, and image data, and you will have a working animated GIF canvas.
import java.io.IOException;
import java.io.InputStream;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.ImageLoader;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
public class AnimatedGif extends Canvas {
public static void main(String[] args) throws IOException {
byte[] d = decode(b64.toCharArray(), 0, b64.length());
Display display = new Display ();
Shell shell = new Shell(display);
shell.setBounds(0, 0, 100, 100);
AnimatedGif gif = new AnimatedGif(shell, SWT.NONE);
gif.setLocation(10,10);
gif.setSize(16, 16);
gif.load(new java.io.ByteArrayInputStream(d));
shell.open();
gif.animate();
while (!shell.isDisposed ()) {
if (!display.readAndDispatch ()) display.sleep ();
}
display.dispose ();
}
private final ImageLoader loader = new ImageLoader();
private int img = 0;
private volatile boolean animating = false;
private Thread animateThread;
public AnimatedGif(Composite parent, int style) {
super(parent, style);
}
public void load (InputStream resource) throws IOException {
loader.load(resource);
}
public void animate() {
if (animateThread == null) {
animateThread = createThread();
animateThread.setDaemon(true);
}
if (animateThread.isAlive())
return;
animateThread.start();
}
public void stop() {
animating = false;
if (animateThread != null)
try {
animateThread.join();
animateThread = null;
} catch (InterruptedException e) {
// do nothing
}
}
private Thread createThread() {
return new Thread() {
long currentTime = System.currentTimeMillis();
final Display display = getParent().getDisplay();
public void run() {
animating = true;
while(animating) {
img = (img == loader.data.length-1) ? 0 : img + 1;
int delayTime = Math.max(50, 10*loader.data[img].delayTime);
long now = System.currentTimeMillis();
long ms = Math.max(currentTime + delayTime - now, 5);
currentTime += delayTime;
try {
Thread.sleep(ms);
} catch(Exception e) {
return;
}
if (!display.isDisposed())
display.asyncExec(new Runnable() {
@Override
public void run() {
ImageData nextFrameData = loader.data[img];
Image frameImage = new Image(display, nextFrameData);
new GC(AnimatedGif.this).drawImage(frameImage, nextFrameData.x, nextFrameData.y);
frameImage.dispose();
//canvas.redraw();
}
});
}
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
new GC(AnimatedGif.this).fillRectangle(
0,
0,
getBounds().width,
getBounds().height);
}
});
}
};
}
private static final char[] map1 = new char[64];
static {
int i = 0;
for (char c = 'A'; c <= 'Z'; c++)
map1[i++] = c;
for (char c = 'a'; c <= 'z'; c++)
map1[i++] = c;
for (char c = '0'; c <= '9'; c++)
map1[i++] = c;
map1[i++] = '+';
map1[i++] = '/';
}
private static final byte[] map2 = new byte[128];
static {
for (int i = 0; i < map2.length; i++)
map2[i] = -1;
for (int i = 0; i < 64; i++)
map2[map1[i]] = (byte) i;
}
public static byte[] decode(char[] in, int iOff, int iLen) {
if (iLen % 4 != 0)
throw new IllegalArgumentException(
"Length of Base64 encoded input string is not a multiple of 4.");
while (iLen > 0 && in[iOff + iLen - 1] == '=')
iLen--;
int oLen = (iLen * 3) / 4;
byte[] out = new byte[oLen];
int ip = iOff;
int iEnd = iOff + iLen;
int op = 0;
while (ip < iEnd) {
int i0 = in[ip++];
int i1 = in[ip++];
int i2 = ip < iEnd ? in[ip++] : 'A';
int i3 = ip < iEnd ? in[ip++] : 'A';
if (i0 > 127 || i1 > 127 || i2 > 127 || i3 > 127)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int b0 = map2[i0];
int b1 = map2[i1];
int b2 = map2[i2];
int b3 = map2[i3];
if (b0 < 0 || b1 < 0 || b2 < 0 || b3 < 0)
throw new IllegalArgumentException(
"Illegal character in Base64 encoded data.");
int o0 = (b0 << 2) | (b1 >>> 4);
int o1 = ((b1 & 0xf) << 4) | (b2 >>> 2);
int o2 = ((b2 & 3) << 6) | b3;
out[op++] = (byte) o0;
if (op < oLen)
out[op++] = (byte) o1;
if (op < oLen)
out[op++] = (byte) o2;
}
return out;
}
static String b64 =
"R0lGODlhEAAQAPf/AMra5KO9zOzz9s3b5MDS3aS+zZOwwsrZ4+bt89jm7IKftuLr" +
"8ZCswH6bs9vn7qrD0+Ts8oqnvYKetomlu8PV4t7p79Pg6dzp79vm7q/F1aC6zdTi" +
"6bbL2XqYsdDf55SwxK/H1ajC0Yajudzp7q7E087c59Xi6niWr8XY4qW/0LnM2aO9" +
"zoyov7bK2djk7ICbtIOhtt7p7pq1yc3b5sDU38DR39vm7LDH1afA0Yqovebu85Gt" +
"wouovsvZ5oCdtunv9IaiuXyZscrb5qnC0Zezxdjj64WiuZ65y7vQ3N/p8OLq8avE" +
"1MTX4o6rwJaxxLnN29jm7a7F1dvm7Y2qwH2bs7zQ3Jizx9Df6djj7HmXr+Tt8rvO" +
"26/E1LrO3KC5zKW+z8/e577Q3oCctYShubTK2HyasZGtwHuYsdTg6enw89/p76zE" +
"04iku+Lr8LXJ2JKvw+Xt8tHf583b57vP3pu2ycjY45m0yNvp7qzE1bHI13qZsaK8" +
"zn2aspq0yODp8Nfi6svb5oaiuuDp76vE09Hf6ZWzxMfY4qnC087e5+rx9eLs8pSv" +
"xL/R3rPI2Nfj64ajuq7E1Y2pv7rQ2+Lp8J65yarC1JKvxLzR3nybsrzQ3unw9HmY" +
"sLvR3LPI17nO28rY5Iqlvc3c5urx9Iakus/f53iVrtPi6cnY436btNTi6tfi67PK" +
"1rHI2H6bspy3yJ63ydXi7JezxKjA0ZGuwNjk64CbtazE0Zezx8DR3NXi68TX4Nvn" +
"7e7097vP24CctH2atIqlvN7o7tPh6Nzn7JGvwK/F1MbX4dHg6MfY4b7S3c/e5n2b" +
"srjN2ezy9eDq78rb4svb473R2+Ts8cHV3r7R3eXu8rfN2JSvwo+tv9/p7pSxw6C7" +
"ytbj6cTX36zD1NHg5vL3+KnB0Onx9cPV35y4yLPJ1dnl7LnO2cHU3avD0bnP2sDV" +
"4pKvwdfk7O7z99Xi6XqXsNTi6N3o7sjY4ubv8s7c5qvE0env842svuLs8Z64yJm0" +
"xZq2x46svuLq8P///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQFAAD/ACwAAAAAEAAQ" +
"AAAIvQD/CRR4poEEf0q0DFz478SZFwqAxPC3AAEChqU6GAQSwcHEihcFntAoQUQE" +
"BgkcVKCI4IdAkiYZfCiScmXFfzBzMFhYk6K/jScZ/uspSEFMJ0KxpIxxAQgPBk5k" +
"CDVRBIoDDE8/yNAg1MIGmglkyvDy5cHCEh4smIC14d/WLyGGQGpBA4UQtBYsCDyy" +
"AseDByBaqKhhqAfaKwNTPIiyBAQZFUjqGmY4KEMGEG5UdAlDgYlQgQ9IdOJQhRPD" +
"gAAh+QQFAAD/ACwAAAAAEAAQAAAI1gD/CRR4IkiZBjx0DFwoMIueBj58EBGlRAtD" +
"h1TEKAikgJUOfxAWbsKk0QgbFjmEJEkiEIFAVBLGjMoxZUcfG1AEhvz3YiOoSAz/" +
"3VHzD8EPIxNYMPgQVMqFSRAWsGjyhoidoAmkVFjgb8eHPq80BMWS4I4fQZboaFiB" +
"I6gJFxhGXDiyJ4WsIZUWItrwxwYGF/9ShHjwIEOLGkwAkbJgAgvgwHjwLLnB4QmF" +
"HnI8oNmwYSCJDCDycPBUA8UMMHGuMHxAQvSTTBTqzBASVCCIFlWQ1KDAMCAAIfkE" +
"BQAA/wAsAAAAABAAEAAACMsA/wkUeKZDmSANfAxc+K8BlRe+FNRSMIbKMoYwFCjI" +
"CAOGEY1iqgyMMIHkhJIRgE14JImXEoEMdpiJyYBBpBxN5mjyB+GfEytWiNyyspCR" +
"ohE8f2jQ4GUpw38JLvBE8CVFChxDnkadOmTQoRCQtEqFoKNYlAdrMohNyiVDlBst" +
"5ix0lAAphAX/uNwgoyIMDQpMdJWAgrSNQC6NODyZQ4NJnVAWCFeYNLDFli6MDM3w" +
"gCZXgl0XGFaRVAPFZjQmHEl5KhCaaQ+EKDAMCAAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"2wD/CRRIJUiHLCdODFz47xEMCT5eNOBjMAjDKTkisBFh5J+CiK3KDFy06I2ZJix4" +
"TJggYowCPr8EHqEjg4iTDzuaTGExQYEVQP9WCN3j5cjCKXYuIfihZEiIEDhwMBSo" +
"KMYCLSSiHAqxZKoNB0kW/MvAZQmJDFP/jfi3AMEaEDcacZgqZSACEjfIcOjCZKEL" +
"DGr+IdDxz42KJ0hQCEFkQVUCB34WEP6nYssWGqfkxDH1B8MIfwv8DUQShsKnEoQ2" +
"FMEQI4lohhR6gLFg4q+DCmn/zQBzZUMqGwwDAgAh+QQFAAD/ACwAAAAAEAAQAAAI" +
"uAD/CRTIgAcQBQ06DFz474iMDwVFSEh4gmGKFRoeMogAZGKHUgMHPcDxxYsMJxsl" +
"UhSYIcqDEF8yosyh8sy/BxmWPBjyYGFKCS/OkABBFBLDfxsPNujkhkyLFkchcpTA" +
"QYVVGkeLJHAQw1+VLkhqoDi6tasSTmFoGBJSYuHWCv4WaPlHAUWPEh4smMDyNi4C" +
"gUzu5t2g1QHcBQj+CsSrtwgUw34VD7xiAVZZvz+OCtyQAMMFQf4YBgQAIfkEBQAA" +
"/wAsAAAAABAAEAAACNYA/wkUeMTSDhZGXgxc+C9Fij10PjSZoADVJoYk8IRIoaHP" +
"GxaBJGDKMvBBBjwPZK14RYQBqDFi9AwkAWLJgyE4NNj5EGmUAiokQeTJcyNDJYY5" +
"jMQ80YKD0xYM/01ho6BBkCpPPD2pEfVgIB9lkGSqQYFJ1D45FPhoQBZFD0BRbQhh" +
"RYQHhToz5JBCxBBKEh2idPybAcaDhQ0msCSQcudfEn9KBAqJg8bEHxeLL6gRCEHL" +
"wCsbsNjAcKfCpH8Qov7b4ALDCD8LICD4N1u1iwuC/C34wTAgACH5BAUAAP8ALAAA" +
"AAAQABAAAAjNAP8JFMil2JAvGpwMXPiPC5cMUQal0GCFQQSGLRrdiPLgUAovVnZM" +
"gDGwyhYOZG6sCYFDAxEzERQ0ECipyxMVLTJAGqLhFoMJCqj8o1CD0ZwwcxaGHPni" +
"DAoUhmjQYPjvpwJfHWZoZUKBKkwYCsp4GFunK8NIEWDUCkIIDZpQuqjmAGZEJgUT" +
"uSyUcLSQUROgY8b8c5QACpQEiBErmvMoqEApu0ZIvkD5wghNksQsG3ihgr/PoD/z" +
"qsJwUhsIqFNDUEJV4IIfOhAg+MEwIAAh+QQFAAD/ACwAAAAAEAAQAAAI1wD/CRRI" +
"Yk0GEkNWDFz4T4WbGyC4RAmx4sgihki2qCBzY8khinQWTRlIIcyWJxwakQiBY4+M" +
"NzkeCaRAgQaSLhwyLMHhhYiZCDD+zejx6RQKJguPOGnCRgIVMGBKyBHC8N8HFiJ8" +
"BLligVAcRFV38DDyosMGExtMWajaZMK/BllSuSjyR1XVKRMU8DlhA4PfBC4WTsGq" +
"9cQ/BzFGOMAg5Z+Nf3YmjCkrsEISf37U/FOs6JICBa2CDPS3YMHAJDEQWOFThqE/" +
"HQj+lV7wA9CvqgJhx9aihGFAACH5BAUAAP8ALAAAAAAQABAAAAjAAP8JFMipCodO" +
"JB4MXPiPCYUwXVS4AZEhwyCGJXqgoIFEBRkQS6I8SDHwioeMhmqoaAHiwQMcK44I" +
"tGDhpJCNLSANCfFFg4x/G2CZqFli4YMvXmR8YJAgQZENFhj+87mUBwYHUIqYkCrD" +
"CQMeQC7EcJAAi1SvEUQoEOSvAlmpDCIAkdDA34K2bwcyyPGPbod/CO66dbo0Ld2B" +
"PwLjTRBXhN8TAxEgEDhWrt9SDCXfjQFEwYszkKX+06LEH90zDAMCACH5BAkAAP8A" +
"LAAAAAAQABAAAAjWAP8JFEihBpIqLUAMXPhPyIw6FDI94ZCHxAOGV+KAmYGihieK" +
"IDKQGLhhAxoPcnpQmHhjCR48KQS6wGLCAilATGq0yPDgQYiYLjDY+LMB0cJKQ2Sl" +
"2HPkwggMLkww/IdjhQY6lgT5uZMAy1QNr/p82OFvQQUpCabaIfKmCYsFECZdkDL1" +
"AwMWE4z8QPBPzZ2pkUAFUvBCIASBUGz02TElx6gxElAJ5PsvSRIhOViwMaJADKZN" +
"CyH408FKwWAxVPRkYahFiSgiPnw0UD11oA4eDcoEOcEwIAAh+QQJAAD/ACwAAAAA" +
"EAAQAAAIyAD/CRw4zAYtYdwGKvyXz1m2YAaF1XsGbWE1aW3UQKQVb8CBA8YG3qMH" +
"Z4E8c++OKQMwD9k4XALTpClp7tsxD8oGIOs2DaamRDPbOFMIwBi6ZARwuQMKp83C" +
"fwSiVUEqgKnThdTU9YrWq5nVp73OMWNmzWvQp6tWlUv7j6m4AQpt2Utny1a4tmnc" +
"AQhAaVuAAAUABxaoCdwzfq70UaJEzpUrSo8HQjNQqNC+WLEKZda8cNYsIoWuFTLA" +
"bhaxpwK1EWOADV+/hQEBACH5BAUAAP8ALAAAAAAQABAAAAjcAP8JFGgj1YYrYGYM" +
"XPivggMMLkxYANODAsN//pLEwFBkA6ESnyiEQTLQ3wJ/IzD8MRVHzikaW7aoEKhj" +
"gR8HCVRZQCQEBZInKtz804HAnxqIC5l04UDmBgkEEPxdkHKRQ6MbINYgODkiwcUM" +
"JJZwybA1iQMbF5eEOBSFhJYFMRR5ZYgDR4gQQ5T8QHDJzpSFR7zsWUH4HyArCiaw" +
"mNJkxwcnRGTQOSLwFx8FY0RMmMCDRRMzbxYtGlim1QsfCmAYEcEmQo6/C4N0CMKn" +
"wWkJMB5dFHjiRJbZVBgGBAAh+QQFAAD/ACwAAAAAEAAQAAAIuQD/CRToT9AFDAk2" +
"DFz47weCBf5iOEgAy8IVhgge+qvgAEoRExY8lBiYESLHBEU2hCzRg4lAjSexgBTZ" +
"AwWFf1pMTlxYQoghGmE4KYm4kyGKGki6VCGKkuE/GiqicpAAJAKDD05btCDjplMD" +
"BVUZOIUEoiyJMy8kiLC68MGQB0syPPh3poHaHAycyNDwJcSDKBkEnuhgd21eGV6+" +
"4HgwaGApwlStfti7IgXDwYV5XJVxxKlAwmA1MwwIACH5BAUAAP8ALAAAAAAQABAA" +
"AAjVAP8JFPhjgT9BF1wMXPgPQUMIC/yMwOBiA0OBEP5NqnAHgw0sG64M1JLxn5oL" +
"UhK4+GMCTRwhApX4S/LvTkosJjZY8ABmxj8donQkgcIQESk5M+pQ4EGElRAbFwH1" +
"QEGhRgMfCnL0ucikaiYkZXwEYrHjYo0nnp5UCdJAAZspF1twmNvihB4xRnIwrJTh" +
"Rp48IP5loaJgVKQPdjTgGPJgCQgSA++OAcWAyKsVsh7gyfBgYBZMEsa+6aMhRQg8" +
"kBduQqVgQpMPdPakSHFR4AsjZC0dYRgQACH5BAUAAP8ALAAAAAAQABAAAAjRAP8J" +
"FFgBAQIdP2wMXPhPCYSHECG0GcaQBi9/GDNirNBuIA8xkjSNuEDywoiTuzYIpKLg" +
"0RxFCWLGhAIlgaN/D4womNCE0UJHJSzkMpHphQIjwFgw/KcrFBo0hCTUghHBy1JG" +
"dTx4CNVAAVUiS9cxmTGjRxlfO80spUHDEAoUE17AmLDDysIuYeYwqnHpH8sJDG5p" +
"CFGJSwsVT7pIEtg1ghkiGnCEWHODDIctWwbOreslxaEHUW40usEwAgMrGlIMipKB" +
"y5qlAp1o+DKExBeGAQEAIfkEBQAA/wAsAAAAABAAEAAACNcA/wkUqETLPwQIByr8" +
"xwPQjwUQD+rAsBATHysIYiQZCNHfwBWtFCi4pMjBiH9q/PhLkkBghxdjJtj5Z+Of" +
"FAwmYzj4xyaIDxEsmih0kQCDUSzw+CiYsGPhP1V/irhIlaXBvwmWnFowtcHElTIv" +
"jPD44FRIHEIWrvgESnahEDklwMygIoFNEydHFFJAcepTjzr/YEQwQ8QLjgfeOHRB" +
"QoMCBYFAcryRsQdHCBKNODzZEobDwCmL6KwIcWjJDTIqtiBZaObI6ChcQNxw48ap" +
"wBVDSGRYs2ZhQAA7";
}
Here is sample code. Basically this is how it works: unlike other image formats, an animated GIF has a set of ImageData objects (each like a frame in an animation). You render this image data on a Canvas with whatever delay you want.
http://www.java2s.com/Code/Java/SWT-JFace-Eclipse/DisplayananimatedGIF.htm
This is also working example code, based upon the Eclipse article about SWT. I was trying to make a slideshow / animated GIF dialog from images located in a folder; it is posted here because it could be useful for someone.
public class GifDialog extends Dialog {
Shell dialog;
private Canvas canvas;
int numberImage = 1;
private volatile boolean running = true;
final List<Image> imageCollection = new ArrayList<Image>();
GifDialog(Shell parent) {
super(parent);
}
public String open() {
Shell parent = getParent();
this.func();
dialog = new Shell(parent, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL);
dialog.setSize(600, 400);
dialog.setText("Show Begins!!!");
Monitor primary = dialog.getDisplay().getPrimaryMonitor();
Rectangle bounds = primary.getBounds();
Rectangle rect = dialog.getBounds();
int x = bounds.x + (bounds.width - rect.width) / 2;
int y = bounds.y + (bounds.height - rect.height) / 2;
dialog.setLocation(x, y);
dialog.setLayout(new FillLayout());
final Canvas canvas = new Canvas(dialog, SWT.NONE);
final Image image = new Image(dialog.getDisplay(), imageCollection.get(
0).getImageData());
final GC gc = new GC(image);
canvas.addPaintListener(new PaintListener() {
public void paintControl(PaintEvent event) {
event.gc.drawImage(image, 0, 0);
}
});
Thread thread = new Thread() {
public void run() {
while (running) {
dialog.getDisplay().asyncExec(new Runnable() {
public void run() {
numberImage = numberImage == imageCollection.size() - 1
? 0 : numberImage + 1;
ImageData nextFrameData = imageCollection.get(
numberImage).getImageData();
Image frameImage = new Image(dialog.getDisplay(),
nextFrameData);
gc.drawImage(frameImage, nextFrameData.x,
nextFrameData.y);
frameImage.dispose();
canvas.redraw();
if (numberImage == 0)
try {
running = false;
} catch (Exception e) {
e.printStackTrace();
}
try {
Thread.sleep(200);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
}
}
};
dialog.open();
thread.start();
Display display = parent.getDisplay();
while (!dialog.isDisposed()) {
if (!display.readAndDispatch())
display.sleep();
}
return "After Dialog";
}
public void func() {
File path = new File("..\\folder");
File[] files = path.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].isFile()) { // this line weeds out other
// directories/folders
try {
ImageData imageData = new ImageData(
new ByteArrayInputStream(loadImage(files[i])));
final Image image = new Image(Display.getDefault(),
imageData);
imageCollection.add(image);
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
public byte[] loadImage(File file) throws IOException {
BufferedImage image = ImageIO.read(file);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ImageIO.write(image, "jpg", bos);
return bos.toByteArray();
}
public Canvas getCanvas() {
return canvas;
}
public void setCanvas(Canvas canvas) {
this.canvas = canvas;
}
}
Note: None of the examples that rely on SWT's ImageLoader.class will work on GTK Linux SWT, as there is currently a bug that hard-codes the maximum frames to 32 and sets the delay between frames incorrectly.
See GTK ImageLoader.java
// Fix the number of GIF frames as GdkPixbufAnimation does not provide an API to
// determine number of frames.
int num_frames = 32;
