Converting a series of BufferedImages to a video in Java?

How would I convert an array of BufferedImages into a video? I'm making a screen recorder.
How would I compress the video afterward?

You'll need the JMF (Java Media Framework) API for this. Sun has a code sample on the subject in the JMF documentation: Generating a Movie File from a List of (JPEG) Images.
Update: since the takeover by Oracle, a lot of links have broken, including the JMF ones. Fortunately, the JpegImagesToMovie.java code sample has been mirrored here. For the sake of completeness, here's a copy:
package org.apollo.jmf.test;
/*
* #(#)JpegImagesToMovie.java 1.3 01/03/13
*
* Copyright (c) 1999-2001 Sun Microsystems, Inc. All Rights Reserved.
*
* Sun grants you ("Licensee") a non-exclusive, royalty free, license to use,
* modify and redistribute this software in source and binary code form,
* provided that i) this copyright notice and license appear on all copies of
* the software; and ii) Licensee does not utilize the software in a manner
* which is disparaging to Sun.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ANY
* IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE
* LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING
* OR DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS
* LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT,
* INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER
* CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF
* OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGES.
*
* This software is not designed or intended for use in on-line control of
* aircraft, air traffic, aircraft navigation or aircraft communications; or in
* the design, construction, operation or maintenance of any nuclear
* facility. Licensee represents and warrants that it will not use or
* redistribute the Software for such purposes.
*/
import java.io.*;
import java.util.*;
import java.awt.Dimension;
import javax.media.*;
import javax.media.control.*;
import javax.media.protocol.*;
import javax.media.protocol.DataSource;
import javax.media.datasink.*;
import javax.media.format.VideoFormat;
/**
* This program takes a list of JPEG image files and converts them into
* a QuickTime movie.
*/
public class JpegImagesToMovie implements ControllerListener, DataSinkListener {
public boolean doIt(int width, int height, int frameRate, Vector inFiles, MediaLocator outML) {
ImageDataSource ids = new ImageDataSource(width, height, frameRate, inFiles);
Processor p;
try {
System.err.println("- create processor for the image datasource ...");
p = Manager.createProcessor(ids);
} catch (Exception e) {
System.err.println("Yikes! Cannot create a processor from the data source.");
return false;
}
p.addControllerListener(this);
// Put the Processor into configured state so we can set
// some processing options on the processor.
p.configure();
if (!waitForState(p, p.Configured)) {
System.err.println("Failed to configure the processor.");
return false;
}
// Set the output content descriptor to QuickTime.
p.setContentDescriptor(new ContentDescriptor(FileTypeDescriptor.QUICKTIME));
// Query for the processor for supported formats.
// Then set it on the processor.
TrackControl tcs[] = p.getTrackControls();
Format f[] = tcs[0].getSupportedFormats();
if (f == null || f.length <= 0) {
System.err.println("The mux does not support the input format: " + tcs[0].getFormat());
return false;
}
tcs[0].setFormat(f[0]);
System.err.println("Setting the track format to: " + f[0]);
// We are done with programming the processor. Let's just
// realize it.
p.realize();
if (!waitForState(p, p.Realized)) {
System.err.println("Failed to realize the processor.");
return false;
}
// Now, we'll need to create a DataSink.
DataSink dsink;
if ((dsink = createDataSink(p, outML)) == null) {
System.err.println("Failed to create a DataSink for the given output MediaLocator: " + outML);
return false;
}
dsink.addDataSinkListener(this);
fileDone = false;
System.err.println("start processing...");
// OK, we can now start the actual transcoding.
try {
p.start();
dsink.start();
} catch (IOException e) {
System.err.println("IO error during processing");
return false;
}
// Wait for EndOfStream event.
waitForFileDone();
// Cleanup.
try {
dsink.close();
} catch (Exception e) {}
p.removeControllerListener(this);
System.err.println("...done processing.");
return true;
}
/**
* Create the DataSink.
*/
DataSink createDataSink(Processor p, MediaLocator outML) {
DataSource ds;
if ((ds = p.getDataOutput()) == null) {
System.err.println("Something is really wrong: the processor does not have an output DataSource");
return null;
}
DataSink dsink;
try {
System.err.println("- create DataSink for: " + outML);
dsink = Manager.createDataSink(ds, outML);
dsink.open();
} catch (Exception e) {
System.err.println("Cannot create the DataSink: " + e);
return null;
}
return dsink;
}
Object waitSync = new Object();
boolean stateTransitionOK = true;
/**
* Block until the processor has transitioned to the given state.
* Return false if the transition failed.
*/
boolean waitForState(Processor p, int state) {
synchronized (waitSync) {
try {
while (p.getState() < state && stateTransitionOK)
waitSync.wait();
} catch (Exception e) {}
}
return stateTransitionOK;
}
/**
* Controller Listener.
*/
public void controllerUpdate(ControllerEvent evt) {
if (evt instanceof ConfigureCompleteEvent ||
evt instanceof RealizeCompleteEvent ||
evt instanceof PrefetchCompleteEvent) {
synchronized (waitSync) {
stateTransitionOK = true;
waitSync.notifyAll();
}
} else if (evt instanceof ResourceUnavailableEvent) {
synchronized (waitSync) {
stateTransitionOK = false;
waitSync.notifyAll();
}
} else if (evt instanceof EndOfMediaEvent) {
evt.getSourceController().stop();
evt.getSourceController().close();
}
}
Object waitFileSync = new Object();
boolean fileDone = false;
boolean fileSuccess = true;
/**
* Block until file writing is done.
*/
boolean waitForFileDone() {
synchronized (waitFileSync) {
try {
while (!fileDone)
waitFileSync.wait();
} catch (Exception e) {}
}
return fileSuccess;
}
/**
* Event handler for the file writer.
*/
public void dataSinkUpdate(DataSinkEvent evt) {
if (evt instanceof EndOfStreamEvent) {
synchronized (waitFileSync) {
fileDone = true;
waitFileSync.notifyAll();
}
} else if (evt instanceof DataSinkErrorEvent) {
synchronized (waitFileSync) {
fileDone = true;
fileSuccess = false;
waitFileSync.notifyAll();
}
}
}
public static void main(String args[]) {
if (args.length == 0)
prUsage();
// Parse the arguments.
int i = 0;
int width = -1, height = -1, frameRate = 1;
Vector inputFiles = new Vector();
String outputURL = null;
while (i < args.length) {
if (args[i].equals("-w")) {
i++;
if (i >= args.length)
prUsage();
width = new Integer(args[i]).intValue();
} else if (args[i].equals("-h")) {
i++;
if (i >= args.length)
prUsage();
height = new Integer(args[i]).intValue();
} else if (args[i].equals("-f")) {
i++;
if (i >= args.length)
prUsage();
frameRate = new Integer(args[i]).intValue();
} else if (args[i].equals("-o")) {
i++;
if (i >= args.length)
prUsage();
outputURL = args[i];
} else {
inputFiles.addElement(args[i]);
}
i++;
}
if (outputURL == null || inputFiles.size() == 0)
prUsage();
// Check for output file extension.
if (!outputURL.endsWith(".mov") && !outputURL.endsWith(".MOV")) {
System.err.println("The output file extension should end with a .mov extension");
prUsage();
}
if (width < 0 || height < 0) {
System.err.println("Please specify the correct image size.");
prUsage();
}
// Check the frame rate.
if (frameRate < 1)
frameRate = 1;
// Generate the output media locators.
MediaLocator oml;
if ((oml = createMediaLocator(outputURL)) == null) {
System.err.println("Cannot build media locator from: " + outputURL);
System.exit(0);
}
JpegImagesToMovie imageToMovie = new JpegImagesToMovie();
imageToMovie.doIt(width, height, frameRate, inputFiles, oml);
System.exit(0);
}
static void prUsage() {
System.err.println("Usage: java JpegImagesToMovie -w <width> -h <height> -f <frame rate> -o <output URL> <input JPEG file 1> <input JPEG file 2> ...");
System.exit(-1);
}
/**
* Create a media locator from the given string.
*/
static MediaLocator createMediaLocator(String url) {
MediaLocator ml;
if (url.indexOf(":") > 0 && (ml = new MediaLocator(url)) != null)
return ml;
if (url.startsWith(File.separator)) {
if ((ml = new MediaLocator("file:" + url)) != null)
return ml;
} else {
String file = "file:" + System.getProperty("user.dir") + File.separator + url;
if ((ml = new MediaLocator(file)) != null)
return ml;
}
return null;
}
///////////////////////////////////////////////
//
// Inner classes.
///////////////////////////////////////////////
/**
* A DataSource to read from a list of JPEG image files and
* turn that into a stream of JMF buffers.
* The DataSource is not seekable or positionable.
*/
class ImageDataSource extends PullBufferDataSource {
ImageSourceStream streams[];
ImageDataSource(int width, int height, int frameRate, Vector images) {
streams = new ImageSourceStream[1];
streams[0] = new ImageSourceStream(width, height, frameRate, images);
}
public void setLocator(MediaLocator source) {
}
public MediaLocator getLocator() {
return null;
}
/**
* Content type is of RAW since we are sending buffers of video
* frames without a container format.
*/
public String getContentType() {
return ContentDescriptor.RAW;
}
public void connect() {
}
public void disconnect() {
}
public void start() {
}
public void stop() {
}
/**
* Return the ImageSourceStreams.
*/
public PullBufferStream[] getStreams() {
return streams;
}
/**
* We could have derived the duration from the number of
* frames and frame rate. But for the purpose of this program,
* it's not necessary.
*/
public Time getDuration() {
return DURATION_UNKNOWN;
}
public Object[] getControls() {
return new Object[0];
}
public Object getControl(String type) {
return null;
}
}
/**
* The source stream to go along with ImageDataSource.
*/
class ImageSourceStream implements PullBufferStream {
Vector images;
int width, height;
VideoFormat format;
int nextImage = 0; // index of the next image to be read.
boolean ended = false;
public ImageSourceStream(int width, int height, int frameRate, Vector images) {
this.width = width;
this.height = height;
this.images = images;
format = new VideoFormat(VideoFormat.JPEG,
new Dimension(width, height),
Format.NOT_SPECIFIED,
Format.byteArray,
(float)frameRate);
}
/**
* We should never need to block assuming data are read from files.
*/
public boolean willReadBlock() {
return false;
}
/**
* This is called from the Processor to read a frame worth
* of video data.
*/
public void read(Buffer buf) throws IOException {
// Check if we've finished all the frames.
if (nextImage >= images.size()) {
// We are done. Set EndOfMedia.
System.err.println("Done reading all images.");
buf.setEOM(true);
buf.setOffset(0);
buf.setLength(0);
ended = true;
return;
}
String imageFile = (String)images.elementAt(nextImage);
nextImage++;
System.err.println(" - reading image file: " + imageFile);
// Open a random access file for the next image.
RandomAccessFile raFile;
raFile = new RandomAccessFile(imageFile, "r");
byte data[] = null;
// Check the input buffer type & size.
if (buf.getData() instanceof byte[])
data = (byte[])buf.getData();
// Check to see the given buffer is big enough for the frame.
if (data == null || data.length < raFile.length()) {
data = new byte[(int)raFile.length()];
buf.setData(data);
}
// Read the entire JPEG image from the file.
raFile.readFully(data, 0, (int)raFile.length());
System.err.println(" read " + raFile.length() + " bytes.");
buf.setOffset(0);
buf.setLength((int)raFile.length());
buf.setFormat(format);
buf.setFlags(buf.getFlags() | buf.FLAG_KEY_FRAME);
// Close the random access file.
raFile.close();
}
/**
* Return the format of each video frame. That will be JPEG.
*/
public Format getFormat() {
return format;
}
public ContentDescriptor getContentDescriptor() {
return new ContentDescriptor(ContentDescriptor.RAW);
}
public long getContentLength() {
return 0;
}
public boolean endOfStream() {
return ended;
}
public Object[] getControls() {
return new Object[0];
}
public Object getControl(String type) {
return null;
}
}
}
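Since the question starts from an array of BufferedImages rather than JPEG files on disk, the easiest way to feed this class is to dump each frame to a temporary JPEG and pass the file names to doIt. The following is only a rough sketch of that glue code (class and variable names are mine, and it assumes it lives in the same package as JpegImagesToMovie, since createMediaLocator is package-private):

import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Vector;
import javax.imageio.ImageIO;
import javax.media.MediaLocator;

public class RecorderExport {
    // Writes each captured frame to a temporary JPEG and hands the list to JpegImagesToMovie.
    public static void export(BufferedImage[] frames, int frameRate, File outMov) throws Exception {
        Vector<String> files = new Vector<String>();
        for (int i = 0; i < frames.length; i++) {
            // JPEG has no alpha channel, so draw each frame onto a plain RGB image first.
            BufferedImage rgb = new BufferedImage(
                    frames[i].getWidth(), frames[i].getHeight(), BufferedImage.TYPE_INT_RGB);
            rgb.getGraphics().drawImage(frames[i], 0, 0, null);
            File tmp = File.createTempFile("frame" + i + "_", ".jpg");
            tmp.deleteOnExit();
            ImageIO.write(rgb, "jpg", tmp);
            files.add(tmp.getAbsolutePath());
        }
        // doIt expects a .mov output; build the locator the same way the sample does ("file:" + path).
        MediaLocator oml = JpegImagesToMovie.createMediaLocator("file:" + outMov.getAbsolutePath());
        new JpegImagesToMovie().doIt(frames[0].getWidth(), frames[0].getHeight(), frameRate, files, oml);
    }
}

As for compressing afterward: this sample stores the frames as JPEG inside a QuickTime container, so the per-frame JPEG compression is all you get from JMF; for modern codecs (H.264 etc.) see the Xuggler and Humble-Video answers below.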

You can use Xuggler (on Windows, Mac or Linux) to do this, and the following tutorials will show you exactly how to do it. In particular, see the (I'm not kidding) "How to Grow Some Balls" tutorial for a program that makes a video out of a series of BufferedImages (and some audio).
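For reference, the heart of those tutorials is Xuggler's IMediaWriter from the com.xuggle.mediatool package. The sketch below is not the tutorial code, just an illustration of the idea (file name, frame rate and frame list are placeholders):

import java.awt.image.BufferedImage;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.xuggle.mediatool.IMediaWriter;
import com.xuggle.mediatool.ToolFactory;
import com.xuggle.xuggler.ICodec;

public class XugglerSketch {
    // Encodes a list of equally sized BufferedImages into an H.264 video file.
    public static void encode(List<BufferedImage> frames, int fps, String outFile) {
        int width = frames.get(0).getWidth();
        int height = frames.get(0).getHeight();
        IMediaWriter writer = ToolFactory.makeWriter(outFile);
        writer.addVideoStream(0, 0, ICodec.ID.CODEC_ID_H264, width, height);
        long frameDurationMs = 1000L / fps;
        for (int i = 0; i < frames.size(); i++) {
            // Xuggler's converters expect TYPE_3BYTE_BGR images, so redraw each frame.
            BufferedImage bgr = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
            bgr.getGraphics().drawImage(frames.get(i), 0, 0, null);
            writer.encodeVideo(0, bgr, i * frameDurationMs, TimeUnit.MILLISECONDS);
        }
        writer.close();
    }
}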

Werner Randelshofer has done some work on writing QuickTime and AVI movies in pure Java. See: http://www.randelshofer.ch/blog/2008/08/writing-avi-videos-in-pure-java/
You could also use Xuggler: http://www.xuggle.com

Related

JavaPicture cannot be resolved to a type

I'm using JPegImagesToMovie to create a movie from a bunch of jpeg images, but I get an error in the java file. This is the file:
/*
* #(#)JpegImagesToMovie.java 1.3 01/03/13
*
* Copyright (c) 1999-2001 Sun Microsystems, Inc. All Rights Reserved.
*
* Sun grants you ("Licensee") a non-exclusive, royalty free, license to use,
* modify and redistribute this software in source and binary code form,
* provided that i) this copyright notice and license appear on all copies of
* the software; and ii) Licensee does not utilize the software in a manner
* which is disparaging to Sun.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ANY
* IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE
* LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING
* OR DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS
* LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT,
* INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER
* CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF
* OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGES.
*
* This software is not designed or intended for use in on-line control of
* aircraft, air traffic, aircraft navigation or aircraft communications; or in
* the design, construction, operation or maintenance of any nuclear
* facility. Licensee represents and warrants that it will not use or
* redistribute the Software for such purposes.
*/
import java.io.*;
import java.util.*;
import java.awt.Dimension;
import javax.media.*;
import javax.media.control.*;
import javax.media.protocol.*;
import javax.media.protocol.DataSource;
import javax.media.datasink.*;
import javax.media.format.VideoFormat;
import javax.media.format.JPEGFormat;
/**
* This program takes a list of JPEG image files and converts them into
* a QuickTime movie.
*/
public class JPegtoMovie implements ControllerListener, DataSinkListener {
public boolean doItPath(int width, int height, int frameRate, Vector inFiles, String outputURL) {
// Check for output file extension.
if (!outputURL.endsWith(".mov") && !outputURL.endsWith(".MOV")) {
//System.err.println("The output file extension should end with a .mov extension");
prUsage();
}
// Generate the output media locators.
MediaLocator oml;
if ((oml = createMediaLocator("file:" + outputURL)) == null) {
//System.err.println("Cannot build media locator from: " + outputURL);
System.exit(0);
}
boolean success = doIt(width, height, frameRate, inFiles, oml);
System.gc();
return success;
}
public boolean doIt(int width, int height, int frameRate, Vector inFiles, MediaLocator outML) {
ImageDataSource ids = new ImageDataSource(width, height, frameRate, inFiles);
Processor p;
try {
//System.err.println("- create processor for the image datasource ...");
p = Manager.createProcessor(ids);
} catch (Exception e) {
//System.err.println("Yikes! Cannot create a processor from the data source.");
return false;
}
p.addControllerListener(this);
// Put the Processor into configured state so we can set
// some processing options on the processor.
p.configure();
if (!waitForState(p, p.Configured)) {
//System.err.println("Failed to configure the processor.");
p.close();
p.deallocate();
return false;
}
// Set the output content descriptor to QuickTime.
p.setContentDescriptor(new ContentDescriptor(FileTypeDescriptor.QUICKTIME));
// Query for the processor for supported formats.
// Then set it on the processor.
TrackControl tcs[] = p.getTrackControls();
Format f[] = tcs[0].getSupportedFormats();
//System.out.println(f[0].getEncoding());
if (f == null || f.length <= 0) {
//System.err.println("The mux does not support the input format: " + tcs[0].getFormat());
p.close();
p.deallocate();
return false;
}
tcs[0].setFormat(f[0]);
//System.err.println("Setting the track format to: " + f[0]);
// We are done with programming the processor. Let's just
// realize it.
p.realize();
if (!waitForState(p, p.Realized)) {
//System.err.println("Failed to realize the processor.");
p.close();
p.deallocate();
return false;
}
// Now, we'll need to create a DataSink.
DataSink dsink;
if ((dsink = createDataSink(p, outML)) == null) {
//System.err.println("Failed to create a DataSink for the given output MediaLocator: " + outML);
p.close();
p.deallocate();
return false;
}
dsink.addDataSinkListener(this);
fileDone = false;
//System.err.println("start processing...");
// OK, we can now start the actual transcoding.
try {
p.start();
dsink.start();
} catch (IOException e) {
p.close();
p.deallocate();
dsink.close();
//System.err.println("IO error during processing");
return false;
}
// Wait for EndOfStream event.
waitForFileDone();
// Cleanup.
try {
dsink.close();
} catch (Exception e) {}
p.removeControllerListener(this);
//System.err.println("...done processing.");
p.close();
return true;
}
/**
* Create the DataSink.
*/
DataSink createDataSink(Processor p, MediaLocator outML) {
DataSource ds;
if ((ds = p.getDataOutput()) == null) {
//System.err.println("Something is really wrong: the processor does not have an output DataSource");
return null;
}
DataSink dsink;
try {
//System.err.println("- create DataSink for: " + outML);
dsink = Manager.createDataSink(ds, outML);
dsink.open();
} catch (Exception e) {
//System.err.println("Cannot create the DataSink: " + e);
return null;
}
return dsink;
}
Object waitSync = new Object();
boolean stateTransitionOK = true;
/**
* Block until the processor has transitioned to the given state.
* Return false if the transition failed.
*/
boolean waitForState(Processor p, int state) {
synchronized (waitSync) {
try {
while (p.getState() < state && stateTransitionOK)
waitSync.wait();
} catch (Exception e) {}
}
return stateTransitionOK;
}
/**
* Controller Listener.
*/
public void controllerUpdate(ControllerEvent evt) {
if (evt instanceof ConfigureCompleteEvent ||
evt instanceof RealizeCompleteEvent ||
evt instanceof PrefetchCompleteEvent) {
synchronized (waitSync) {
stateTransitionOK = true;
waitSync.notifyAll();
}
} else if (evt instanceof ResourceUnavailableEvent) {
synchronized (waitSync) {
stateTransitionOK = false;
waitSync.notifyAll();
}
} else if (evt instanceof EndOfMediaEvent) {
evt.getSourceController().stop();
evt.getSourceController().close();
}
}
Object waitFileSync = new Object();
boolean fileDone = false;
boolean fileSuccess = true;
/**
* Block until file writing is done.
*/
boolean waitForFileDone() {
synchronized (waitFileSync) {
try {
while (!fileDone)
waitFileSync.wait();
} catch (Exception e) {}
}
return fileSuccess;
}
/**
* Event handler for the file writer.
*/
public void dataSinkUpdate(DataSinkEvent evt) {
if (evt instanceof EndOfStreamEvent) {
synchronized (waitFileSync) {
fileDone = true;
waitFileSync.notifyAll();
}
} else if (evt instanceof DataSinkErrorEvent) {
synchronized (waitFileSync) {
fileDone = true;
fileSuccess = false;
waitFileSync.notifyAll();
}
}
}
public static void main(String args[]) {
if (args.length == 0)
prUsage();
// Parse the arguments.
int i = 0;
int width = -1, height = -1, frameRate = 1;
Vector inputFiles = new Vector();
String outputURL = null;
while (i < args.length) {
if (args[i].equals("-w")) {
i++;
if (i >= args.length)
prUsage();
width = new Integer(args[i]).intValue();
} else if (args[i].equals("-h")) {
i++;
if (i >= args.length)
prUsage();
height = new Integer(args[i]).intValue();
} else if (args[i].equals("-f")) {
i++;
if (i >= args.length)
prUsage();
frameRate = new Integer(args[i]).intValue();
} else if (args[i].equals("-o")) {
i++;
if (i >= args.length)
prUsage();
outputURL = args[i];
} else {
inputFiles.addElement(args[i]);
}
i++;
}
if (outputURL == null || inputFiles.size() == 0)
prUsage();
// Check for output file extension.
if (!outputURL.endsWith(".mov") && !outputURL.endsWith(".MOV")) {
System.err.println("The output file extension should end with a .mov extension");
prUsage();
}
if (width < 0 || height < 0) {
System.err.println("Please specify the correct image size.");
prUsage();
}
// Check the frame rate.
if (frameRate < 1)
frameRate = 1;
// Generate the output media locators.
MediaLocator oml;
if ((oml = createMediaLocator(outputURL)) == null) {
System.err.println("Cannot build media locator from: " + outputURL);
System.exit(0);
}
JPegtoMovie imageToMovie = new JPegtoMovie();
imageToMovie.doIt(width, height, frameRate, inputFiles, oml);
System.exit(0);
}
static void prUsage() {
System.err.println("Usage: java JpegImagesToMovie -w <width> -h <height> -f <frame rate> -o <output URL> <input JPEG file 1> <input JPEG file 2> ...");
System.exit(-1);
}
/**
* Create a media locator from the given string.
*/
static MediaLocator createMediaLocator(String url) {
MediaLocator ml;
if (url.indexOf(":") > 0 && (ml = new MediaLocator(url)) != null)
return ml;
if (url.startsWith(File.separator)) {
if ((ml = new MediaLocator("file:" + url)) != null)
return ml;
} else {
String file = "file:" + System.getProperty("user.dir") + File.separator + url;
if ((ml = new MediaLocator(file)) != null)
return ml;
}
return null;
}
///////////////////////////////////////////////
//
// Inner classes.
///////////////////////////////////////////////
/**
* A DataSource to read from a list of JPEG image files and
* turn that into a stream of JMF buffers.
* The DataSource is not seekable or positionable.
*/
class ImageDataSource extends PullBufferDataSource {
ImageSourceStream streams[];
ImageDataSource(int width, int height, int frameRate, Vector images) {
streams = new ImageSourceStream[1];
streams[0] = new ImageSourceStream(width, height, frameRate, images);
}
public void setLocator(MediaLocator source) {
}
public MediaLocator getLocator() {
return null;
}
/**
* Content type is of RAW since we are sending buffers of video
* frames without a container format.
*/
public String getContentType() {
return ContentDescriptor.RAW;
}
public void connect() {
}
public void disconnect() {
}
public void start() {
}
public void stop() {
}
/**
* Return the ImageSourceStreams.
*/
public PullBufferStream[] getStreams() {
return streams;
}
/**
* We could have derived the duration from the number of
* frames and frame rate. But for the purpose of this program,
* it's not necessary.
*/
public Time getDuration() {
return DURATION_UNKNOWN;
}
public Object[] getControls() {
return new Object[0];
}
public Object getControl(String type) {
return null;
}
}
/**
* The source stream to go along with ImageDataSource.
*/
class ImageSourceStream implements PullBufferStream {
Vector images;
int width, height;
VideoFormat format;
int nextImage = 0; // index of the next image to be read.
boolean ended = false;
public ImageSourceStream(int width, int height, int frameRate, Vector images) {
this.width = width;
this.height = height;
this.images = images;
format = new JPEGFormat(new Dimension(width, height),
Format.NOT_SPECIFIED,
Format.byteArray,
(float)frameRate,
75,
JPEGFormat.DEC_422);
}
/**
* We should never need to block assuming data are read from files.
*/
public boolean willReadBlock() {
return false;
}
/**
* This is called from the Processor to read a frame worth
* of video data.
*/
public void read(Buffer buf) throws IOException {
// Check if we've finished all the frames.
if (nextImage >= images.size()) {
// We are done. Set EndOfMedia.
//System.err.println("Done reading all images.");
buf.setEOM(true);
buf.setOffset(0);
buf.setLength(0);
ended = true;
return;
}
//For JES, we pass around JavaPictures
//String imageFile = (String)images.elementAt(nextImage);
JavaPicture image = (JavaPicture)images.elementAt(nextImage);
image.saveImage(".movtemp.jpg");
String imageFile = ".movtemp.jpg";
nextImage++;
//System.err.println(" - reading image file: " + imageFile);
// Open a random access file for the next image.
RandomAccessFile raFile;
raFile = new RandomAccessFile(imageFile, "r");
byte data[] = null;
// Check the input buffer type & size.
if (buf.getData() instanceof byte[])
data = (byte[])buf.getData();
// Check to see the given buffer is big enough for the frame.
if (data == null || data.length < raFile.length()) {
data = new byte[(int)raFile.length()];
buf.setData(data);
}
// Read the entire JPEG image from the file.
raFile.readFully(data, 0, (int)raFile.length());
//System.err.println(" read " + raFile.length() + " bytes.");
buf.setOffset(0);
buf.setLength((int)raFile.length());
buf.setFormat(format);
buf.setFlags(buf.getFlags() | buf.FLAG_KEY_FRAME);
// Close the random access file.
raFile.close();
}
/**
* Return the format of each video frame. That will be JPEG.
*/
public Format getFormat() {
return format;
}
public ContentDescriptor getContentDescriptor() {
return new ContentDescriptor(ContentDescriptor.RAW);
}
public long getContentLength() {
return 0;
}
public boolean endOfStream() {
return ended;
}
public Object[] getControls() {
return new Object[0];
}
public Object getControl(String type) {
return null;
}
}
}
I get an error on
//For JES, we pass around JavaPictures
//String imageFile = (String)images.elementAt(nextImage);
JavaPicture image = (JavaPicture)images.elementAt(nextImage);
image.saveImage(".movtemp.jpg");
String imageFile = ".movtemp.jpg";
nextImage++;
I don't know what JavaPicture is, and Googling didn't help very much. It appears that JES is only for Linux, and I don't know how to use it on Windows, or even whether that would work. Does anyone know what this is?
A simple Google search turns up that JpegImagesToMovie originates at coweb.cc.gatech.edu. Searching for JavaPicture turns up this Javadoc http://coweb.cc.gatech.edu/mediaComp-plan/uploads/95/JavaPicture.html as well as a link to download the Java file.
I'd start there.
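Alternatively, if you don't want the JES dependency at all, you can make the Vector carry plain BufferedImages and drop JavaPicture. A hedged sketch of what those lines in read() could become (this assumes you change the caller to pass BufferedImage objects, and it needs import javax.imageio.ImageIO and import java.awt.image.BufferedImage):

// Assuming the images Vector now holds BufferedImage objects instead of JavaPictures.
BufferedImage image = (BufferedImage) images.elementAt(nextImage);
File tmp = new File(".movtemp.jpg");
ImageIO.write(image, "jpg", tmp); // frames should be TYPE_INT_RGB/TYPE_3BYTE_BGR for clean JPEG output
String imageFile = tmp.getAbsolutePath();
nextImage++;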

How to encode images into a video file in Java through programming?

I am trying to encode some images of the same resolution into a video file. For that I have tried:
jCodec
jcodec..example description
But it is very time consuming, not a practical tool for encoding a large number of images, and it creates a QuickTime output file.
FFMPEG
FFMPEG..example description
But ffmpeg is only able to create a video from image files; the images need to exist on the file system.
I have heard that Xuggler's APIs can be used in a Java program to create a video file, but as its site seems broken, I am unable to try it.
Does anybody know how to encode images into a video file from Java? Please help!
Thanks in advance!
Xuggler is deprecated; use Humble-Video instead. It already comes with some demo projects, including one that takes screenshots and converts them to a video file: RecordAndEncodeVideo.java
/*******************************************************************************
* Copyright (c) 2014, Art Clarke. All rights reserved.
* <p>
* This file is part of Humble-Video.
* <p>
* Humble-Video is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* <p>
* Humble-Video is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
* <p>
* You should have received a copy of the GNU Affero General Public License
* along with Humble-Video. If not, see <http://www.gnu.org/licenses/>.
*******************************************************************************/
package io.humble.video.demos;
import io.humble.video.*;
import io.humble.video.awt.MediaPictureConverter;
import io.humble.video.awt.MediaPictureConverterFactory;
import org.apache.commons.cli.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
/**
* Records the contents of your computer screen to a media file for the passed in duration.
* This is meant as a demonstration program to teach the use of the Humble API.
* <p>
* Concepts introduced:
* </p>
* <ul>
* <li>Muxer: A {@link Muxer} object is a container you can write media data to.</li>
* <li>Encoders: An {@link Encoder} object lets you convert {@link MediaAudio} or {@link MediaPicture} objects into {@link MediaPacket} objects
* so they can be written to {@link Muxer} objects.</li>
* </ul>
*
* <p>
* To run from maven, do:
* </p>
* <pre>
* mvn install exec:java -Dexec.mainClass="io.humble.video.demos.RecordAndEncodeVideo" -Dexec.args="filename.mp4"
* </pre>
*
* @author aclarke
*
*/
public class RecordAndEncodeVideo
{
/**
* Records the screen
*/
private static void recordScreen (String filename, String formatname, String codecname, int duration, int snapsPerSecond) throws AWTException, InterruptedException, IOException
{
/**
* Set up the AWT infrastructure to take screenshots of the desktop.
*/
final Robot robot = new Robot();
final Toolkit toolkit = Toolkit.getDefaultToolkit();
final Rectangle screenbounds = new Rectangle(toolkit.getScreenSize());
final Rational framerate = Rational.make(1, snapsPerSecond);
/** First we create a muxer using the passed in filename and formatname if given. */
final Muxer muxer = Muxer.make(filename, null, formatname);
/** Now, we need to decide what type of codec to use to encode video. Muxers
* have limited sets of codecs they can use. We're going to pick the first one that
* works, or if the user supplied a codec name, we're going to force-fit that
* in instead.
*/
final MuxerFormat format = muxer.getFormat();
final Codec codec;
if (codecname != null)
{
codec = Codec.findEncodingCodecByName(codecname);
}
else
{
codec = Codec.findEncodingCodec(format.getDefaultVideoCodecId());
}
/**
* Now that we know what codec, we need to create an encoder
*/
Encoder encoder = Encoder.make(codec);
/**
* Video encoders need to know at a minimum:
* width
* height
* pixel format
* Some also need to know frame-rate (older codecs that had a fixed rate at which video files could
* be written needed this). There are many other options you can set on an encoder, but we're
* going to keep it simpler here.
*/
encoder.setWidth(screenbounds.width);
encoder.setHeight(screenbounds.height);
// We are going to use 420P as the format because that's what most video formats these days use
final PixelFormat.Type pixelformat = PixelFormat.Type.PIX_FMT_YUV420P;
encoder.setPixelFormat(pixelformat);
encoder.setTimeBase(framerate);
/** An annoyance of some formats is that they need global (rather than per-stream) headers,
* and in that case you have to tell the encoder. And since Encoders are decoupled from
* Muxers, there is no easy way to know this beyond
*/
if (format.getFlag(MuxerFormat.Flag.GLOBAL_HEADER))
{
encoder.setFlag(Encoder.Flag.FLAG_GLOBAL_HEADER, true);
}
/** Open the encoder. */
encoder.open(null, null);
/** Add this stream to the muxer. */
muxer.addNewStream(encoder);
/** And open the muxer for business. */
muxer.open(null, null);
/** Next, we need to make sure we have the right MediaPicture format objects
* to encode data with. Java (and most on-screen graphics programs) use some
* variant of Red-Green-Blue image encoding (a.k.a. RGB or BGR). Most video
* codecs use some variant of YCrCb formatting. So we're going to have to
* convert. To do that, we'll introduce a MediaPictureConverter object later.
*/
MediaPictureConverter converter = null;
final MediaPicture picture = MediaPicture.make(encoder.getWidth(), encoder.getHeight(), pixelformat);
picture.setTimeBase(framerate);
/** Now begin our main loop of taking screen snaps.
* We're going to encode and then write out any resulting packets. */
final MediaPacket packet = MediaPacket.make();
for (int i = 0; i < duration / framerate.getDouble(); i++)
{
/** Make the screen capture && convert image to TYPE_3BYTE_BGR */
final BufferedImage screen = convertToType(robot.createScreenCapture(screenbounds), BufferedImage.TYPE_3BYTE_BGR);
/** This is LIKELY not in YUV420P format, so we're going to convert it using some handy utilities. */
if (converter == null)
{
converter = MediaPictureConverterFactory.createConverter(screen, picture);
}
converter.toPicture(picture, screen, i);
do
{
encoder.encode(packet, picture);
if (packet.isComplete())
{
muxer.write(packet, false);
}
} while (packet.isComplete());
/** now we'll sleep until it's time to take the next snapshot. */
Thread.sleep((long) (1000 * framerate.getDouble()));
}
/** Encoders, like decoders, sometimes cache pictures so it can do the right key-frame optimizations.
* So, they need to be flushed as well. As with the decoders, the convention is to pass in a null
* input until the output is not complete.
*/
do
{
encoder.encode(packet, null);
if (packet.isComplete())
{
muxer.write(packet, false);
}
} while (packet.isComplete());
/** Finally, let's clean up after ourselves. */
muxer.close();
}
@SuppressWarnings("static-access")
public static void main (String[] args) throws InterruptedException, IOException, AWTException
{
final Options options = new Options();
options.addOption("h", "help", false, "displays help");
options.addOption("v", "version", false, "version of this library");
options.addOption(OptionBuilder.withArgName("format").withLongOpt("format").hasArg().
withDescription("muxer format to use. If unspecified, we will guess from filename").create("f"));
options.addOption(OptionBuilder.withArgName("codec")
.withLongOpt("codec")
.hasArg()
.withDescription("codec to use when encoding video; If unspecified, we will guess from format")
.create("c"));
options.addOption(OptionBuilder.withArgName("duration")
.withLongOpt("duration")
.hasArg()
.withDescription("number of seconds of screenshot to record; defaults to 10.")
.create("d"));
options.addOption(OptionBuilder.withArgName("snaps per second")
.withLongOpt("snaps")
.hasArg()
.withDescription("number of pictures to take per second (i.e. the frame rate); defaults to 5")
.create("s"));
final CommandLineParser parser = new org.apache.commons.cli.BasicParser();
try
{
final CommandLine cmd = parser.parse(options, args);
final String[] parsedArgs = cmd.getArgs();
if (cmd.hasOption("version"))
{
// let's find what version of the library we're running
final String version = io.humble.video_native.Version.getVersionInfo();
System.out.println("Humble Version: " + version);
}
else if (cmd.hasOption("help") || parsedArgs.length != 1)
{
final HelpFormatter formatter = new HelpFormatter();
formatter.printHelp(RecordAndEncodeVideo.class.getCanonicalName() + " <filename>", options);
}
else
{
/**
* Read in some option values and their defaults.
*/
final int duration = Integer.parseInt(cmd.getOptionValue("duration", "10"));
if (duration <= 0)
{
throw new IllegalArgumentException("duration must be > 0");
}
final int snaps = Integer.parseInt(cmd.getOptionValue("snaps", "5"));
if (snaps <= 0)
{
throw new IllegalArgumentException("snaps must be > 0");
}
final String codecname = cmd.getOptionValue("codec");
final String formatname = cmd.getOptionValue("format");
final String filename = cmd.getArgs()[0];
recordScreen(filename, formatname, codecname, duration, snaps);
}
} catch (ParseException e)
{
System.err.println("Exception parsing command line: " + e.getLocalizedMessage());
}
}
/**
* Convert a {@link BufferedImage} of any type, to {@link BufferedImage} of a
* specified type. If the source image is the same type as the target type,
* then original image is returned, otherwise new image of the correct type is
* created and the content of the source image is copied into the new image.
*
* @param sourceImage
* the image to be converted
* @param targetType
* the desired BufferedImage type
*
* @return a BufferedImage of the specified target type.
*
* @see BufferedImage
*/
public static BufferedImage convertToType (BufferedImage sourceImage, int targetType)
{
BufferedImage image;
// if the source image is already the target type, return the source image
if (sourceImage.getType() == targetType)
{
image = sourceImage;
}
// otherwise create a new image of the target type and draw the new
// image
else
{
image = new BufferedImage(sourceImage.getWidth(), sourceImage.getHeight(), targetType);
image.getGraphics().drawImage(sourceImage, 0, 0, null);
}
return image;
}
}
Check the other demos too: humble-video-demos
I am using it for real-time recording in a webapp.
If you are going to stream this in real time, you will need an RTSP server. You can either use big frameworks like Red5 Server or Wowza Streaming Engine, or you can build your own server using Netty, which has had a built-in RTSP codec since version 3.2.
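If your frames are already in memory rather than screen captures, the same Humble calls used in the demo above can be condensed into a small helper. This is only a sketch built from the API shown in RecordAndEncodeVideo.java (it reuses its convertToType helper, so assume it sits in the same package; frame rate, file name and frame list are placeholders):

import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.List;
import io.humble.video.*;
import io.humble.video.awt.MediaPictureConverter;
import io.humble.video.awt.MediaPictureConverterFactory;

public class ImagesToVideo {
    // Encodes a list of same-sized BufferedImages into a video file (e.g. "out.mp4").
    public static void encode(List<BufferedImage> frames, int fps, String filename) throws IOException {
        final Rational framerate = Rational.make(1, fps);
        final Muxer muxer = Muxer.make(filename, null, null); // format guessed from the extension
        final MuxerFormat format = muxer.getFormat();
        final Codec codec = Codec.findEncodingCodec(format.getDefaultVideoCodecId());
        final Encoder encoder = Encoder.make(codec);
        encoder.setWidth(frames.get(0).getWidth());
        encoder.setHeight(frames.get(0).getHeight());
        encoder.setPixelFormat(PixelFormat.Type.PIX_FMT_YUV420P);
        encoder.setTimeBase(framerate);
        if (format.getFlag(MuxerFormat.Flag.GLOBAL_HEADER))
            encoder.setFlag(Encoder.Flag.FLAG_GLOBAL_HEADER, true);
        encoder.open(null, null);
        muxer.addNewStream(encoder);
        muxer.open(null, null);
        final MediaPicture picture = MediaPicture.make(encoder.getWidth(), encoder.getHeight(),
                PixelFormat.Type.PIX_FMT_YUV420P);
        picture.setTimeBase(framerate);
        final MediaPacket packet = MediaPacket.make();
        MediaPictureConverter converter = null;
        for (int i = 0; i < frames.size(); i++) {
            // The converter wants TYPE_3BYTE_BGR input, exactly as in the screen-recording demo.
            BufferedImage bgr = RecordAndEncodeVideo.convertToType(frames.get(i), BufferedImage.TYPE_3BYTE_BGR);
            if (converter == null)
                converter = MediaPictureConverterFactory.createConverter(bgr, picture);
            converter.toPicture(picture, bgr, i);
            do {
                encoder.encode(packet, picture);
                if (packet.isComplete()) muxer.write(packet, false);
            } while (packet.isComplete());
        }
        // Flush any frames the encoder is still holding, then close the container.
        do {
            encoder.encode(packet, null);
            if (packet.isComplete()) muxer.write(packet, false);
        } while (packet.isComplete());
        muxer.close();
    }
}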
On the command line, there are various ways to convert images into a video, and you can run those commands from Java to do the saving. You can get the commands from the following links:
Using ffmpeg to convert a set of images into a video
Create a video slideshow from images
I am sharing a few code snippets to solve the issue:
Code to save a PNG image from an HTML5 canvas:
// Decode the Base64-encoded PNG posted from the HTML5 canvas (decodeBase64 is static).
byte[] pic = Base64.decodeBase64(request.getParameter("pic"));
String frameCount = request.getParameter("frame");
InputStream in = new ByteArrayInputStream(pic);
BufferedImage bImageFromConvert = ImageIO.read(in);
String outdir = "output\\" + frameCount;
File file = new File(outdir);
// Overwrite any previous frame with the same number, then write the new one.
if (file.isFile()) {
file.delete();
}
ImageIO.write(bImageFromConvert, "png", file);
Code for creating images from a video:
String filePath = "D:\\temp\\some.mpg";
String outdir = "output";
File file = new File(outdir);
file.mkdirs();
Map<String, String> m = System.getenv();
/*
* String command[] =
* {"D:\\ffmpeg-win32-static\\bin\\ffmpeg","-i",filePath
* ,"-r 30","-f","image2",outdir,"\\user%03d.jpg"};
*
* ProcessBuilder pb = new ProcessBuilder(command); pb.start();
*/
String commands = "D:\\ffmpeg-win32-static\\bin\\ffmpeg -i " + filePath
+ " -r 30 -f image2 " + outdir + "\\image%5d.png";
Process p = Runtime.getRuntime().exec(commands);
Code for creating a video from images:
String filePath = "output";
File fileP = new File(filePath);
String commands = "D:\\ffmpeg-win32-static\\bin\\ffmpeg -f image2 -i "
+ fileP + "\\image%5d.png " + fileP + "\\video.mp4";
System.out.println(commands);
Runtime.getRuntime().exec(commands);
System.out.println(fileP.getAbsolutePath());
Credit goes to @yashprit
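One caveat with Runtime.getRuntime().exec(String): the command string is split on whitespace, so paths containing spaces break, and if you never read the process output, ffmpeg can block on a full pipe. A slightly more robust variant of the snippet above using ProcessBuilder (the ffmpeg path is still just an example):

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.Arrays;

public class FfmpegRunner {
    public static void imagesToVideo(File imageDir) throws Exception {
        ProcessBuilder pb = new ProcessBuilder(Arrays.asList(
                "D:\\ffmpeg-win32-static\\bin\\ffmpeg", // example path to the ffmpeg binary
                "-f", "image2",
                "-i", new File(imageDir, "image%5d.png").getAbsolutePath(),
                new File(imageDir, "video.mp4").getAbsolutePath()));
        pb.redirectErrorStream(true); // merge stderr into stdout so one reader drains everything
        Process p = pb.start();
        try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String line;
            while ((line = r.readLine()) != null) System.out.println(line);
        }
        int exit = p.waitFor();
        System.out.println("ffmpeg exited with code " + exit);
    }
}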
Another approach for Android developers:
Create a temporary folder on the device.
Copy your images into the new folder.
Rename the pictures to follow a numerical sequence, for example img1.jpg, img2.jpg, img3.jpg, ...
Then run: ffmpeg -f image2 -i img%d.jpg /tmp/a.mpg
To run this programmatically, use the following code:
void convertImg_to_vid()
{
Process chperm;
try {
chperm=Runtime.getRuntime().exec("su");
DataOutputStream os =
new DataOutputStream(chperm.getOutputStream());
os.writeBytes("ffmpeg -f image2 -i img%d.jpg /tmp/a.mpg\n");
os.flush();
chperm.waitFor();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
Resource Link:
Create a Video file from images using ffmpeg
There is a utility in the Java Media Framework which can create a video from a list of JPEG images (link).
Here is the source code:
JpegImagesToMovie.java
/*
* #(#)JpegImagesToMovie.java 1.3 01/03/13
* Copyright (c) 1999-2001 Sun Microsystems, Inc. All Rights Reserved.
* Sun grants you ("Licensee") a non-exclusive, royalty free, license to use,
* modify and redistribute this software in source and binary code form,
* provided that i) this copyright notice and license appear on all copies of
* the software; and ii) Licensee does not utilize the software in a manner
* which is disparaging to Sun.
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ANY
* IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE
* LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING
* OR DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS
* LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT,
* INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER
* CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF
* OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGES.
*
* This software is not designed or intended for use in on-line control of
* aircraft, air traffic, aircraft navigation or aircraft communications; or in
* the design, construction, operation or maintenance of any nuclear
* facility. Licensee represents and warrants that it will not use or
* redistribute the Software for such purposes.
*/
package imagetovideo;
import java.awt.Dimension;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.MalformedURLException;
import java.util.Vector;
import javax.media.Buffer;
import javax.media.ConfigureCompleteEvent;
import javax.media.ControllerEvent;
import javax.media.ControllerListener;
import javax.media.DataSink;
import javax.media.EndOfMediaEvent;
import javax.media.Format;
import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.PrefetchCompleteEvent;
import javax.media.Processor;
import javax.media.RealizeCompleteEvent;
import javax.media.ResourceUnavailableEvent;
import javax.media.Time;
import javax.media.control.TrackControl;
import javax.media.datasink.DataSinkErrorEvent;
import javax.media.datasink.DataSinkEvent;
import javax.media.datasink.DataSinkListener;
import javax.media.datasink.EndOfStreamEvent;
import javax.media.format.VideoFormat;
import javax.media.protocol.ContentDescriptor;
import javax.media.protocol.DataSource;
import javax.media.protocol.FileTypeDescriptor;
import javax.media.protocol.PullBufferDataSource;
import javax.media.protocol.PullBufferStream;
/**
* This program takes a list of JPEG image files and converts them into a
* QuickTime movie.
*/
public class JpegImagesToMovie implements ControllerListener, DataSinkListener {
public boolean doIt(int width, int height, int frameRate, Vector inFiles,
MediaLocator outML) throws MalformedURLException {
ImageDataSource ids = new ImageDataSource(width, height, frameRate,
inFiles);
Processor p;
try {
//System.err
// .println("- create processor for the image datasource ...");
p = Manager.createProcessor(ids);
} catch (Exception e) {
System.err
.println("Yikes! Cannot create a processor from the data source.");
return false;
}
p.addControllerListener(this);
// Put the Processor into configured state so we can set
// some processing options on the processor.
p.configure();
if (!waitForState(p, p.Configured)) {
System.err.println("Failed to configure the processor.");
return false;
}
// Set the output content descriptor to QuickTime.
p.setContentDescriptor(new ContentDescriptor(
FileTypeDescriptor.QUICKTIME));
// Query for the processor for supported formats.
// Then set it on the processor.
TrackControl tcs[] = p.getTrackControls();
Format f[] = tcs[0].getSupportedFormats();
if (f == null || f.length <= 0) {
System.err.println("The mux does not support the input format: "
+ tcs[0].getFormat());
return false;
}
tcs[0].setFormat(f[0]);
//System.err.println("Setting the track format to: " + f[0]);
// We are done with programming the processor. Let's just
// realize it.
p.realize();
if (!waitForState(p, p.Realized)) {
System.err.println("Failed to realize the processor.");
return false;
}
// Now, we'll need to create a DataSink.
DataSink dsink;
if ((dsink = createDataSink(p, outML)) == null) {
System.err
.println("Failed to create a DataSink for the given output MediaLocator: "
+ outML);
return false;
}
dsink.addDataSinkListener(this);
fileDone = false;
System.out.println("Generating the video : "+outML.getURL().toString());
// OK, we can now start the actual transcoding.
try {
p.start();
dsink.start();
} catch (IOException e) {
System.err.println("IO error during processing");
return false;
}
// Wait for EndOfStream event.
waitForFileDone();
// Cleanup.
try {
dsink.close();
} catch (Exception e) {
}
p.removeControllerListener(this);
System.out.println("Video creation completed!!!!!");
return true;
}
/**
* Create the DataSink.
*/
DataSink createDataSink(Processor p, MediaLocator outML) {
DataSource ds;
if ((ds = p.getDataOutput()) == null) {
System.err
.println("Something is really wrong: the processor does not have an output DataSource");
return null;
}
DataSink dsink;
try {
//System.err.println("- create DataSink for: " + outML);
dsink = Manager.createDataSink(ds, outML);
dsink.open();
} catch (Exception e) {
System.err.println("Cannot create the DataSink: " + e);
return null;
}
return dsink;
}
Object waitSync = new Object();
boolean stateTransitionOK = true;
/**
* Block until the processor has transitioned to the given state. Return
* false if the transition failed.
*/
boolean waitForState(Processor p, int state) {
synchronized (waitSync) {
try {
while (p.getState() < state && stateTransitionOK)
waitSync.wait();
} catch (Exception e) {
}
}
return stateTransitionOK;
}
/**
* Controller Listener.
*/
public void controllerUpdate(ControllerEvent evt) {
if (evt instanceof ConfigureCompleteEvent
|| evt instanceof RealizeCompleteEvent
|| evt instanceof PrefetchCompleteEvent) {
synchronized (waitSync) {
stateTransitionOK = true;
waitSync.notifyAll();
}
} else if (evt instanceof ResourceUnavailableEvent) {
synchronized (waitSync) {
stateTransitionOK = false;
waitSync.notifyAll();
}
} else if (evt instanceof EndOfMediaEvent) {
evt.getSourceController().stop();
evt.getSourceController().close();
}
}
Object waitFileSync = new Object();
boolean fileDone = false;
boolean fileSuccess = true;
/**
* Block until file writing is done.
*/
boolean waitForFileDone() {
synchronized (waitFileSync) {
try {
while (!fileDone)
waitFileSync.wait();
} catch (Exception e) {
}
}
return fileSuccess;
}
/**
* Event handler for the file writer.
*/
public void dataSinkUpdate(DataSinkEvent evt) {
if (evt instanceof EndOfStreamEvent) {
synchronized (waitFileSync) {
fileDone = true;
waitFileSync.notifyAll();
}
} else if (evt instanceof DataSinkErrorEvent) {
synchronized (waitFileSync) {
fileDone = true;
fileSuccess = false;
waitFileSync.notifyAll();
}
}
}
/*public static void main(String args[]) {
if (args.length == 0)
prUsage();
// Parse the arguments.
int i = 0;
int width = -1, height = -1, frameRate = 1;
Vector inputFiles = new Vector();
String outputURL = null;
while (i < args.length) {
if (args[i].equals("-w")) {
i++;
if (i >= args.length)
prUsage();
width = new Integer(args[i]).intValue();
} else if (args[i].equals("-h")) {
i++;
if (i >= args.length)
prUsage();
height = new Integer(args[i]).intValue();
} else if (args[i].equals("-f")) {
i++;
if (i >= args.length)
prUsage();
frameRate = new Integer(args[i]).intValue();
} else if (args[i].equals("-o")) {
i++;
if (i >= args.length)
prUsage();
outputURL = args[i];
} else {
inputFiles.addElement(args[i]);
}
i++;
}
if (outputURL == null || inputFiles.size() == 0)
prUsage();
// Check for output file extension.
if (!outputURL.endsWith(".mov") && !outputURL.endsWith(".MOV")) {
System.err
.println("The output file extension should end with a .mov extension");
prUsage();
}
if (width < 0 || height < 0) {
System.err.println("Please specify the correct image size.");
prUsage();
}
// Check the frame rate.
if (frameRate < 1)
frameRate = 1;
// Generate the output media locators.
MediaLocator oml;
if ((oml = createMediaLocator(outputURL)) == null) {
System.err.println("Cannot build media locator from: " + outputURL);
System.exit(0);
}
JpegImagesToMovie imageToMovie = new JpegImagesToMovie();
imageToMovie.doIt(width, height, frameRate, inputFiles, oml);
System.exit(0);
}*/
static void prUsage() {
System.err
.println("Usage: java JpegImagesToMovie -w <width> -h <height> -f <frame rate> -o <output URL> <input JPEG file 1> <input JPEG file 2> ...");
System.exit(-1);
}
/**
* Create a media locator from the given string.
*/
static MediaLocator createMediaLocator(String url) {
MediaLocator ml;
if (url.indexOf(":") > 0 && (ml = new MediaLocator(url)) != null)
return ml;
if (url.startsWith(File.separator)) {
if ((ml = new MediaLocator("file:" + url)) != null)
return ml;
} else {
String file = "file:" + System.getProperty("user.dir")
+ File.separator + url;
if ((ml = new MediaLocator(file)) != null)
return ml;
}
return null;
}
// /////////////////////////////////////////////
//
// Inner classes.
// /////////////////////////////////////////////
/**
* A DataSource to read from a list of JPEG image files and turn that into a
* stream of JMF buffers. The DataSource is not seekable or positionable.
*/
class ImageDataSource extends PullBufferDataSource {
ImageSourceStream streams[];
ImageDataSource(int width, int height, int frameRate, Vector images) {
streams = new ImageSourceStream[1];
streams[0] = new ImageSourceStream(width, height, frameRate, images);
}
public void setLocator(MediaLocator source) {
}
public MediaLocator getLocator() {
return null;
}
/**
* Content type is of RAW since we are sending buffers of video frames
* without a container format.
*/
public String getContentType() {
return ContentDescriptor.RAW;
}
public void connect() {
}
public void disconnect() {
}
public void start() {
}
public void stop() {
}
/**
* Return the ImageSourceStreams.
*/
public PullBufferStream[] getStreams() {
return streams;
}
/**
* We could have derived the duration from the number of frames and
* frame rate. But for the purpose of this program, it's not necessary.
*/
public Time getDuration() {
return DURATION_UNKNOWN;
}
public Object[] getControls() {
return new Object[0];
}
public Object getControl(String type) {
return null;
}
}
/**
* The source stream to go along with ImageDataSource.
*/
class ImageSourceStream implements PullBufferStream {
Vector images;
int width, height;
VideoFormat format;
int nextImage = 0; // index of the next image to be read.
boolean ended = false;
public ImageSourceStream(int width, int height, int frameRate,
Vector images) {
this.width = width;
this.height = height;
this.images = images;
format = new VideoFormat(VideoFormat.JPEG, new Dimension(width,
height), Format.NOT_SPECIFIED, Format.byteArray,
(float) frameRate);
}
/**
* We should never need to block assuming data are read from files.
*/
public boolean willReadBlock() {
return false;
}
/**
* This is called from the Processor to read a frame worth of video
* data.
*/
public void read(Buffer buf) throws IOException {
// Check if we've finished all the frames.
if (nextImage >= images.size()) {
// We are done. Set EndOfMedia.
//System.err.println("Done reading all images.");
buf.setEOM(true);
buf.setOffset(0);
buf.setLength(0);
ended = true;
return;
}
String imageFile = (String) images.elementAt(nextImage);
nextImage++;
//System.err.println(" - reading image file: " + imageFile);
// Open a random access file for the next image.
RandomAccessFile raFile;
raFile = new RandomAccessFile(imageFile, "r");
byte data[] = null;
// Check the input buffer type & size.
if (buf.getData() instanceof byte[])
data = (byte[]) buf.getData();
// Check to see the given buffer is big enough for the frame.
if (data == null || data.length < raFile.length()) {
data = new byte[(int) raFile.length()];
buf.setData(data);
}
// Read the entire JPEG image from the file.
raFile.readFully(data, 0, (int) raFile.length());
//System.err.println(" read " + raFile.length() + " bytes.");
buf.setOffset(0);
buf.setLength((int) raFile.length());
buf.setFormat(format);
buf.setFlags(buf.getFlags() | buf.FLAG_KEY_FRAME);
// Close the random access file.
raFile.close();
}
/**
* Return the format of each video frame. That will be JPEG.
*/
public Format getFormat() {
return format;
}
public ContentDescriptor getContentDescriptor() {
return new ContentDescriptor(ContentDescriptor.RAW);
}
public long getContentLength() {
return 0;
}
public boolean endOfStream() {
return ended;
}
public Object[] getControls() {
return new Object[0];
}
public Object getControl(String type) {
return null;
}
}
}
Its doIt method can be called from another class that has a main method:
CreateVideo.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package imagetovideo;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Vector;
import javax.media.MediaLocator;
public class CreateVideo{
public static final File dir = new File("D:\\imagesFolder\\");
public static final String[] extensions = new String[]{"jpg", "png"};
public static final FilenameFilter imageFilter = new FilenameFilter() {
@Override
public boolean accept(final File dir, String name) {
for (final String ext : extensions) {
if (name.endsWith("." + ext)) {
return (true);
}
}
return (false);
}
};
// Main function
public static void main(String[] args) throws IOException {
File file = new File("D:\\a.mp4");
if (!file.exists()) {
file.createNewFile();
}
Vector<String> imgLst = new Vector<>();
if (dir.isDirectory()) {
int counter = 1;
for (final File f : dir.listFiles(imageFilter)) {
imgLst.add(f.getAbsolutePath());
}
}
makeVideo("file:\\" + file.getAbsolutePath(), imgLst);
}
public static void makeVideo(String fileName, Vector imgLst) throws MalformedURLException {
JpegImagesToMovie imageToMovie = new JpegImagesToMovie();
MediaLocator oml;
if ((oml = imageToMovie.createMediaLocator(fileName)) == null) {
System.err.println("Cannot build media locator from: " + fileName);
System.exit(0);
}
int interval = 40;
imageToMovie.doIt(720, 360, (1000 / interval), imgLst, oml);
}
}
Requirements:
Include jmf-2.1.1e.jar in your Library Folder (using this library)

How to create an MP4 video from images in Java by using the JCodec library?

I am doing research about how to create an MP4 video from images in Java. After a few days of researching, I know that JCodec can do it (http://jcodec.org/). Here is the demonstration I found in "Android make animated video from list of images" (I only changed the input and output paths):
private SeekableByteChannel ch;
private Picture toEncode;
private RgbToYuv420 transform;
private H264Encoder encoder;
private ArrayList<ByteBuffer> spsList;
private ArrayList<ByteBuffer> ppsList;
private CompressedTrack outTrack;
private ByteBuffer _out;
private int frameNo;
private MP4Muxer muxer;
public SequenceImagesEncoder(File out) throws IOException {
this.ch = NIOUtils.writableFileChannel(out);
// Transform to convert between RGB and YUV
transform = new RgbToYuv420(0, 0);
// Muxer that will store the encoded frames
muxer = new MP4Muxer(ch, Brand.MP4);
// Add video track to muxer
outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, 25);
// Allocate a buffer big enough to hold output frames
_out = ByteBuffer.allocate(1920 * 1080 * 6);
// Create an instance of encoder
encoder = new H264Encoder();
// Encoder extra data ( SPS, PPS ) to be stored in a special place of
// MP4
spsList = new ArrayList<ByteBuffer>();
ppsList = new ArrayList<ByteBuffer>();
}
public void encodeImage(BufferedImage bi) throws IOException {
if (toEncode == null) {
toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
}
// Perform conversion
for (int i = 0; i < 3; i++)
Arrays.fill(toEncode.getData()[i], 0);
transform.transform(AWTUtil.fromBufferedImage(bi), toEncode);
// Encode image into H.264 frame, the result is stored in '_out' buffer
_out.clear();
ByteBuffer result = encoder.encodeFrame(_out, toEncode);
// Based on the frame above form correct MP4 packet
spsList.clear();
ppsList.clear();
H264Utils.encodeMOVPacket(result, spsList, ppsList);
// Add packet to video track
outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
frameNo++;
}
public void finish() throws IOException {
// Push saved SPS/PPS to a special storage in MP4
outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList));
// Write MP4 header and finalize recording
muxer.writeHeader();
NIOUtils.closeQuietly(ch);
}
public static void main(String[] args) throws IOException {
SequenceImagesEncoder encoder = new SequenceImagesEncoder(new File("D:/workspace/JCodecMakeMP4/out.mp4"));
for (int i = 1; i < 100; i++) {
BufferedImage bi = ImageIO.read(new File(String.format("D:/workspace/JCodecMakeMP4/bin/frame%d.jpeg", i)));
encoder.encodeImage(bi);
}
encoder.finish();
}
}
When I use jcodec-0.1.0.jar, the NIOUtils class does not have the method writableFileChannel(File file).
When I use jcodec-0.1.3.jar, everything seems to be OK, but when I debug the code it fails with "Source Not Found" when I step to the line: muxer = new MP4Muxer(ch, Brand.MP4);
Does anyone know how to fix this?
Thank you in advance!
I'm just sharing my experience: I used JpegImagesToMovie to solve a problem like yours.
For more reference, see JpegImagesToMovie.
Sample program:
public static void makeVideo(String fileName) throws MalformedURLException {
Vector<String> imgLst = ...; // fill with the image file paths, in frame order
JpegImagesToMovie imageToMovie = new JpegImagesToMovie();
MediaLocator oml;
if ((oml = imageToMovie.createMediaLocator(fileName)) == null) {
System.err.println("Cannot build media locator from: " + fileName);
System.exit(0);
}
int interval = 50; // milliseconds per frame, so 1000 / interval = 20 frames per second
imageToMovie.doIt(screenWidth, screenHeight, (1000 / interval), imgLst, oml); // screenWidth/screenHeight are the frame dimensions
}
JpegImagesToMovie.java
/*
* #(#)JpegImagesToMovie.java 1.3 01/03/13
*
* Copyright (c) 1999-2001 Sun Microsystems, Inc. All Rights Reserved.
*
* Sun grants you ("Licensee") a non-exclusive, royalty free, license to use,
* modify and redistribute this software in source and binary code form,
* provided that i) this copyright notice and license appear on all copies of
* the software; and ii) Licensee does not utilize the software in a manner
* which is disparaging to Sun.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ANY
* IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE
* LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING
* OR DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS
* LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT,
* INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER
* CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF
* OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGES.
*
* This software is not designed or intended for use in on-line control of
* aircraft, air traffic, aircraft navigation or aircraft communications; or in
* the design, construction, operation or maintenance of any nuclear
* facility. Licensee represents and warrants that it will not use or
* redistribute the Software for such purposes.
*/
import java.awt.Dimension;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.MalformedURLException;
import java.util.Vector;
import javax.media.Buffer;
import javax.media.ConfigureCompleteEvent;
import javax.media.ControllerEvent;
import javax.media.ControllerListener;
import javax.media.DataSink;
import javax.media.EndOfMediaEvent;
import javax.media.Format;
import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.PrefetchCompleteEvent;
import javax.media.Processor;
import javax.media.RealizeCompleteEvent;
import javax.media.ResourceUnavailableEvent;
import javax.media.Time;
import javax.media.control.TrackControl;
import javax.media.datasink.DataSinkErrorEvent;
import javax.media.datasink.DataSinkEvent;
import javax.media.datasink.DataSinkListener;
import javax.media.datasink.EndOfStreamEvent;
import javax.media.format.VideoFormat;
import javax.media.protocol.ContentDescriptor;
import javax.media.protocol.DataSource;
import javax.media.protocol.FileTypeDescriptor;
import javax.media.protocol.PullBufferDataSource;
import javax.media.protocol.PullBufferStream;
/**
* This program takes a list of JPEG image files and convert them into a
* QuickTime movie.
*/
public class JpegImagesToMovie implements ControllerListener, DataSinkListener {
public boolean doIt(int width, int height, int frameRate, Vector inFiles,
MediaLocator outML) throws MalformedURLException {
ImageDataSource ids = new ImageDataSource(width, height, frameRate,
inFiles);
Processor p;
try {
//System.err
// .println("- create processor for the image datasource ...");
p = Manager.createProcessor(ids);
} catch (Exception e) {
System.err
.println("Yikes! Cannot create a processor from the data source.");
return false;
}
p.addControllerListener(this);
// Put the Processor into configured state so we can set
// some processing options on the processor.
p.configure();
if (!waitForState(p, p.Configured)) {
System.err.println("Failed to configure the processor.");
return false;
}
// Set the output content descriptor to QuickTime.
p.setContentDescriptor(new ContentDescriptor(
FileTypeDescriptor.QUICKTIME));
// Query for the processor for supported formats.
// Then set it on the processor.
TrackControl tcs[] = p.getTrackControls();
Format f[] = tcs[0].getSupportedFormats();
if (f == null || f.length <= 0) {
System.err.println("The mux does not support the input format: "
+ tcs[0].getFormat());
return false;
}
tcs[0].setFormat(f[0]);
//System.err.println("Setting the track format to: " + f[0]);
// We are done with programming the processor. Let's just
// realize it.
p.realize();
if (!waitForState(p, p.Realized)) {
System.err.println("Failed to realize the processor.");
return false;
}
// Now, we'll need to create a DataSink.
DataSink dsink;
if ((dsink = createDataSink(p, outML)) == null) {
System.err
.println("Failed to create a DataSink for the given output MediaLocator: "
+ outML);
return false;
}
dsink.addDataSinkListener(this);
fileDone = false;
System.out.println("Generating the video : "+outML.getURL().toString());
// OK, we can now start the actual transcoding.
try {
p.start();
dsink.start();
} catch (IOException e) {
System.err.println("IO error during processing");
return false;
}
// Wait for EndOfStream event.
waitForFileDone();
// Cleanup.
try {
dsink.close();
} catch (Exception e) {
}
p.removeControllerListener(this);
System.out.println("Video creation completed!!!!!");
return true;
}
/**
* Create the DataSink.
*/
DataSink createDataSink(Processor p, MediaLocator outML) {
DataSource ds;
if ((ds = p.getDataOutput()) == null) {
System.err
.println("Something is really wrong: the processor does not have an output DataSource");
return null;
}
DataSink dsink;
try {
//System.err.println("- create DataSink for: " + outML);
dsink = Manager.createDataSink(ds, outML);
dsink.open();
} catch (Exception e) {
System.err.println("Cannot create the DataSink: " + e);
return null;
}
return dsink;
}
Object waitSync = new Object();
boolean stateTransitionOK = true;
/**
* Block until the processor has transitioned to the given state. Return
* false if the transition failed.
*/
boolean waitForState(Processor p, int state) {
synchronized (waitSync) {
try {
while (p.getState() < state && stateTransitionOK)
waitSync.wait();
} catch (Exception e) {
}
}
return stateTransitionOK;
}
/**
* Controller Listener.
*/
public void controllerUpdate(ControllerEvent evt) {
if (evt instanceof ConfigureCompleteEvent
|| evt instanceof RealizeCompleteEvent
|| evt instanceof PrefetchCompleteEvent) {
synchronized (waitSync) {
stateTransitionOK = true;
waitSync.notifyAll();
}
} else if (evt instanceof ResourceUnavailableEvent) {
synchronized (waitSync) {
stateTransitionOK = false;
waitSync.notifyAll();
}
} else if (evt instanceof EndOfMediaEvent) {
evt.getSourceController().stop();
evt.getSourceController().close();
}
}
Object waitFileSync = new Object();
boolean fileDone = false;
boolean fileSuccess = true;
/**
* Block until file writing is done.
*/
boolean waitForFileDone() {
synchronized (waitFileSync) {
try {
while (!fileDone)
waitFileSync.wait();
} catch (Exception e) {
}
}
return fileSuccess;
}
/**
* Event handler for the file writer.
*/
public void dataSinkUpdate(DataSinkEvent evt) {
if (evt instanceof EndOfStreamEvent) {
synchronized (waitFileSync) {
fileDone = true;
waitFileSync.notifyAll();
}
} else if (evt instanceof DataSinkErrorEvent) {
synchronized (waitFileSync) {
fileDone = true;
fileSuccess = false;
waitFileSync.notifyAll();
}
}
}
/*public static void main(String args[]) {
if (args.length == 0)
prUsage();
// Parse the arguments.
int i = 0;
int width = -1, height = -1, frameRate = 1;
Vector inputFiles = new Vector();
String outputURL = null;
while (i < args.length) {
if (args[i].equals("-w")) {
i++;
if (i >= args.length)
prUsage();
width = new Integer(args[i]).intValue();
} else if (args[i].equals("-h")) {
i++;
if (i >= args.length)
prUsage();
height = new Integer(args[i]).intValue();
} else if (args[i].equals("-f")) {
i++;
if (i >= args.length)
prUsage();
frameRate = new Integer(args[i]).intValue();
} else if (args[i].equals("-o")) {
i++;
if (i >= args.length)
prUsage();
outputURL = args[i];
} else {
inputFiles.addElement(args[i]);
}
i++;
}
if (outputURL == null || inputFiles.size() == 0)
prUsage();
// Check for output file extension.
if (!outputURL.endsWith(".mov") && !outputURL.endsWith(".MOV")) {
System.err
.println("The output file extension should end with a .mov extension");
prUsage();
}
if (width < 0 || height < 0) {
System.err.println("Please specify the correct image size.");
prUsage();
}
// Check the frame rate.
if (frameRate < 1)
frameRate = 1;
// Generate the output media locators.
MediaLocator oml;
if ((oml = createMediaLocator(outputURL)) == null) {
System.err.println("Cannot build media locator from: " + outputURL);
System.exit(0);
}
JpegImagesToMovie imageToMovie = new JpegImagesToMovie();
imageToMovie.doIt(width, height, frameRate, inputFiles, oml);
System.exit(0);
}*/
static void prUsage() {
System.err
.println("Usage: java JpegImagesToMovie -w <width> -h <height> -f <frame rate> -o <output URL> <input JPEG file 1> <input JPEG file 2> ...");
System.exit(-1);
}
/**
* Create a media locator from the given string.
*/
static MediaLocator createMediaLocator(String url) {
MediaLocator ml;
if (url.indexOf(":") > 0 && (ml = new MediaLocator(url)) != null)
return ml;
if (url.startsWith(File.separator)) {
if ((ml = new MediaLocator("file:" + url)) != null)
return ml;
} else {
String file = "file:" + System.getProperty("user.dir")
+ File.separator + url;
if ((ml = new MediaLocator(file)) != null)
return ml;
}
return null;
}
// /////////////////////////////////////////////
//
// Inner classes.
// /////////////////////////////////////////////
/**
* A DataSource to read from a list of JPEG image files and turn that into a
* stream of JMF buffers. The DataSource is not seekable or positionable.
*/
class ImageDataSource extends PullBufferDataSource {
ImageSourceStream streams[];
ImageDataSource(int width, int height, int frameRate, Vector images) {
streams = new ImageSourceStream[1];
streams[0] = new ImageSourceStream(width, height, frameRate, images);
}
public void setLocator(MediaLocator source) {
}
public MediaLocator getLocator() {
return null;
}
/**
* Content type is of RAW since we are sending buffers of video frames
* without a container format.
*/
public String getContentType() {
return ContentDescriptor.RAW;
}
public void connect() {
}
public void disconnect() {
}
public void start() {
}
public void stop() {
}
/**
* Return the ImageSourceStreams.
*/
public PullBufferStream[] getStreams() {
return streams;
}
/**
* We could have derived the duration from the number of frames and
* frame rate. But for the purpose of this program, it's not necessary.
*/
public Time getDuration() {
return DURATION_UNKNOWN;
}
public Object[] getControls() {
return new Object[0];
}
public Object getControl(String type) {
return null;
}
}
/**
* The source stream to go along with ImageDataSource.
*/
class ImageSourceStream implements PullBufferStream {
Vector images;
int width, height;
VideoFormat format;
int nextImage = 0; // index of the next image to be read.
boolean ended = false;
public ImageSourceStream(int width, int height, int frameRate,
Vector images) {
this.width = width;
this.height = height;
this.images = images;
format = new VideoFormat(VideoFormat.JPEG, new Dimension(width,
height), Format.NOT_SPECIFIED, Format.byteArray,
(float) frameRate);
}
/**
* We should never need to block assuming data are read from files.
*/
public boolean willReadBlock() {
return false;
}
/**
* This is called from the Processor to read a frame worth of video
* data.
*/
public void read(Buffer buf) throws IOException {
// Check if we've finished all the frames.
if (nextImage >= images.size()) {
// We are done. Set EndOfMedia.
//System.err.println("Done reading all images.");
buf.setEOM(true);
buf.setOffset(0);
buf.setLength(0);
ended = true;
return;
}
String imageFile = (String) images.elementAt(nextImage);
nextImage++;
//System.err.println(" - reading image file: " + imageFile);
// Open a random access file for the next image.
RandomAccessFile raFile;
raFile = new RandomAccessFile(imageFile, "r");
byte data[] = null;
// Check the input buffer type & size.
if (buf.getData() instanceof byte[])
data = (byte[]) buf.getData();
// Check to see the given buffer is big enough for the frame.
if (data == null || data.length < raFile.length()) {
data = new byte[(int) raFile.length()];
buf.setData(data);
}
// Read the entire JPEG image from the file.
raFile.readFully(data, 0, (int) raFile.length());
//System.err.println(" read " + raFile.length() + " bytes.");
buf.setOffset(0);
buf.setLength((int) raFile.length());
buf.setFormat(format);
buf.setFlags(buf.getFlags() | buf.FLAG_KEY_FRAME);
// Close the random access file.
raFile.close();
}
/**
* Return the format of each video frame. That will be JPEG.
*/
public Format getFormat() {
return format;
}
public ContentDescriptor getContentDescriptor() {
return new ContentDescriptor(ContentDescriptor.RAW);
}
public long getContentLength() {
return 0;
}
public boolean endOfStream() {
return ended;
}
public Object[] getControls() {
return new Object[0];
}
public Object getControl(String type) {
return null;
}
}
}
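Alternatively, if you would rather stay with JCodec instead of switching to JMF, later JCodec releases bundle a helper that wraps the whole encode loop from the question. A minimal sketch, assuming jcodec 0.1.9, where org.jcodec.api.SequenceEncoder exposes encodeImage(BufferedImage) and finish() (the API changed again in 0.2.x); file names are illustrative:
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import org.jcodec.api.SequenceEncoder;

public class MakeMp4WithSequenceEncoder {
    public static void main(String[] args) throws IOException {
        SequenceEncoder enc = new SequenceEncoder(new File("out.mp4"));
        for (int i = 1; i < 100; i++) {
            // Read the next frame and hand it to the encoder.
            BufferedImage frame = ImageIO.read(new File(String.format("frames/frame%d.jpeg", i)));
            enc.encodeImage(frame);
        }
        // Writes the SPS/PPS sample entry and the MP4 header, then closes the channel.
        enc.finish();
    }
}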

A couple of questions about a music project in Java

I have a couple of questions about a school project I'm working on. The code is as follows.
public class Player
{
private PlayList playList;
private Track track;
private int tracksPlayed;
private int totalTrackTime;
private double averageTrackTime;
/**
* Constructor ...
*/
public Player()
{
playList = new PlayList("audio");
track = playList.getTrack(0);
this.tracksPlayed = tracksPlayed;
this.totalTrackTime = totalTrackTime;
this.averageTrackTime = averageTrackTime;
}
/**
* Return the track collection currently loaded in this player.
*/
public PlayList getPlayList()
{
return playList;
}
/**
*
*/
public void play()
{
track.play();
tracksPlayed++;
int trackDuration = track.getDuration();
totalTrackTime = totalTrackTime + trackDuration;
averageTrackTime = totalTrackTime / tracksPlayed;
}
/**
*
*/
public void stop()
{
track.stop();
}
/**
*
*/
public void setTrack(int trackNumber)
{
int currentTrack = trackNumber;
track = playList.getTrack(currentTrack);
}
/**
*
*/
public String getTrackName()
{
String currentTrack = track.getName();
return currentTrack;
}
/**
*
*/
public String getTrackInfo()
{
String currentTrack = track.getName();
int trackDuration = track.getDuration();
return currentTrack + " " + "(" + trackDuration + ")";
}
/**
*
*/
public int getNumberOfTracksPlayed()
{
return tracksPlayed;
}
/**
*
*/
public int getTotalPlayedTrackLength()
{
return totalTrackTime;
}
/**
*
*/
public double averageTrackLength()
{
return averageTrackTime;
}
}
public class Track
{
private Clip soundClip;
private String name;
/**
* Create a track from an audio file.
*/
public Track(File soundFile)
{
soundClip = loadSound(soundFile);
name = soundFile.getName();
}
/**
* Play this sound track. (The sound will play asynchronously, until
* it is stopped or reaches the end.)
*/
public void play()
{
if(soundClip != null) {
soundClip.start();
}
}
/**
* Stop this track playing. (This method has no effect if the track is not
* currently playing.)
*/
public void stop()
{
if(soundClip != null) {
soundClip.stop();
}
}
/**
* Reset this track to its start.
*/
public void rewind()
{
if(soundClip != null) {
soundClip.setFramePosition(0);
}
}
/**
* Return the name of this track.
*/
public String getName()
{
return name;
}
/**
* Return the duration of this track, in seconds.
*/
public int getDuration()
{
if (soundClip == null) {
return 0;
}
else {
return (int) soundClip.getMicrosecondLength()/1000000;
}
}
/**
* Set the playback volume of the current track.
*
* @param vol Volume level as a percentage (0..100).
*/
public void setVolume(int vol)
{
if(soundClip == null) {
return;
}
if(vol < 0 || vol > 100) {
vol = 100;
}
double val = vol / 100.0;
try {
FloatControl volControl =
(FloatControl) soundClip.getControl(FloatControl.Type.MASTER_GAIN);
float dB = (float)(Math.log(val == 0.0 ? 0.0001 : val) / Math.log(10.0) * 20.0);
volControl.setValue(dB);
} catch (Exception ex) {
System.err.println("Error: could not set volume");
}
}
/**
* Return true if this track has successfully loaded and can be played.
*/
public boolean isValid()
{
return soundClip != null;
}
/**
* Load the sound file supplied by the parameter.
*
* @return The sound clip if successful, null if the file could not be decoded.
*/
private Clip loadSound(File file)
{
Clip newClip;
try {
AudioInputStream stream = AudioSystem.getAudioInputStream(file);
AudioFormat format = stream.getFormat();
// we cannot play ALAW/ULAW, so we convert them to PCM
//
if ((format.getEncoding() == AudioFormat.Encoding.ULAW) ||
(format.getEncoding() == AudioFormat.Encoding.ALAW))
{
AudioFormat tmp = new AudioFormat(
AudioFormat.Encoding.PCM_SIGNED,
format.getSampleRate(),
format.getSampleSizeInBits() * 2,
format.getChannels(),
format.getFrameSize() * 2,
format.getFrameRate(),
true);
stream = AudioSystem.getAudioInputStream(tmp, stream);
format = tmp;
}
DataLine.Info info = new DataLine.Info(Clip.class,
stream.getFormat(),
((int) stream.getFrameLength() *
format.getFrameSize()));
newClip = (Clip) AudioSystem.getLine(info);
newClip.open(stream);
return newClip;
} catch (Exception ex) {
return null;
}
}
}
public class PlayList
{
private List<Track> tracks;
/**
* Constructor for objects of class TrackCollection
*/
public PlayList(String directoryName)
{
tracks = loadTracks(directoryName);
}
/**
* Return a track from this collection.
*/
public Track getTrack(int trackNumber)
{
return tracks.get(trackNumber);
}
/**
* Return the number of tracks in this collection.
*/
public int numberOfTracks()
{
return tracks.size();
}
/**
* Load the file names of all files in the given directory.
* @param dirName Directory (folder) name.
* @param suffix File suffix of interest.
* @return The names of files found.
*/
private List<Track> loadTracks(String dirName)
{
File dir = new File(dirName);
if(dir.isDirectory()) {
File[] allFiles = dir.listFiles();
List<Track> foundTracks = new ArrayList<Track>();
for(File file : allFiles) {
//System.out.println("found: " + file);
Track track = new Track(file);
if(track.isValid()) {
foundTracks.add(track);
}
}
return foundTracks;
}
else {
System.err.println("Error: " + dirName + " must be a directory");
return null;
}
}
/**
* Return this playlist as an array of strings with the track names.
*/
public String[] asStrings()
{
String[] names = new String[tracks.size()];
int i = 0;
for(Track track : tracks) {
names[i++] = track.getName();
}
return names;
}
}
I understand that to call the play methods in the Player class, I have to initialize a Track variable, and that I need to initialize it using the PlayList getTrack method. However, how can I initialize it without defining a starting index variable (i.e., I don't want it to automatically initialize to a specific song in the index; I want the user to have to select a song first)?
Also, how do I code the play method in the Player class to stop an existing song if one is playing before starting a new song, and to restart a song if the play method is called again on the same song?
1.
how can I initialize it without defining a starting index variable
public Player(int initTrack) {
...
track = playList.getTrack(initTrack);
...
}
2.
You should keep a field that stores the track that is currently playing (see the sketch below).
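A minimal sketch of what that could look like in the Player class posted above; the currentlyPlaying field and the null check are additions for illustration, and the null check also covers the "no song selected yet" case from the first question:
private Track currentlyPlaying; // added field: the track that is currently playing

public void play()
{
    if(track == null) {
        return; // nothing selected yet -- wait for the user to pick a song
    }
    if(currentlyPlaying != null) {
        currentlyPlaying.stop();   // stop whatever is already playing
        currentlyPlaying.rewind(); // so it starts from the top next time
    }
    track.rewind();                // restart even if play() is called again on the same track
    track.play();
    currentlyPlaying = track;
    tracksPlayed++;
    totalTrackTime = totalTrackTime + track.getDuration();
    averageTrackTime = (double) totalTrackTime / tracksPlayed;
}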

Implementation of Xmodem protocol in Java

How would I receive a file over a serial port in Java using the XMODEM protocol?
Here it is.
I found this in the JModem source. If you look at where it writes the data out, you can see it's sending an SOH, the block number, the complement of the block number (~blocknum), the data, and a checksum. It uses a sector size of 128. Those together make up the standard XModem protocol. It's simple enough to do XModem1K, YModem, and ZModem from here, too.
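To make that frame layout concrete, here is a small helper written for illustration (it is not part of the TModem code below) that assembles one standard 128-byte XMODEM block as just described:
// Illustrative only: lay out one standard XMODEM block.
static byte[] buildBlock(int blockNumber, byte[] data /* exactly 128 bytes */) {
    byte[] frame = new byte[3 + 128 + 1];
    frame[0] = 0x01;                // SOH
    frame[1] = (byte) blockNumber;  // block number (wraps at 256)
    frame[2] = (byte) ~blockNumber; // its one's complement
    System.arraycopy(data, 0, frame, 3, 128);
    int sum = 0;
    for (int i = 0; i < 128; i++) {
        sum += data[i] & 0xFF;      // simple arithmetic checksum of the data bytes
    }
    frame[131] = (byte) sum;
    return frame;
}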
/**
* a tiny version of Ward Christensen's MODEM program for UNIX.
* Written ~ 1980 by Andrew Scott Beals. Last revised 1982.
* A.D. 2000 - dragged from the archives for use in Java Cookbook.
*
* @author C version by Andrew Scott Beals, sjobrg.andy%mit-oz@mit-mc.arpa.
* @author Java version by Ian F. Darwin, ian@darwinsys.com
* $Id: TModem.java,v 1.8 2000/03/02 03:40:50 ian Exp $
*/
class TModem {
protected final byte CPMEOF = 26; /* control/z */
protected final int MAXERRORS = 10; /* max times to retry one block */
protected final int SECSIZE = 128; /* cpm sector, transmission block */
protected final int SENTIMOUT = 30; /* timeout time in send */
protected final int SLEEP = 30; /* timeout time in recv */
/* Protocol characters used */
protected final byte SOH = 1; /* Start Of Header */
protected final byte EOT = 4; /* End Of Transmission */
protected final byte ACK = 6; /* ACKnowlege */
protected final byte NAK = 0x15; /* Negative AcKnowlege */
protected InputStream inStream;
protected OutputStream outStream;
protected PrintWriter errStream;
/** Construct a TModem */
public TModem(InputStream is, OutputStream os, PrintWriter errs) {
inStream = is;
outStream = os;
errStream = errs;
}
/** Construct a TModem with default files (stdin and stdout). */
public TModem() {
inStream = System.in;
outStream = System.out;
errStream = new PrintWriter(System.err);
}
/** A main program, for direct invocation. */
public static void main(String[] argv) throws
IOException, InterruptedException {
/* argc must == 2, i.e., `java TModem -s filename' */
if (argv.length != 2)
usage();
if (argv[0].charAt(0) != '-')
usage();
TModem tm = new TModem();
tm.setStandalone(true);
boolean OK = false;
switch (argv[0].charAt(1)){
case 'r':
OK = tm.receive(argv[1]);
break;
case 's':
OK = tm.send(argv[1]);
break;
default:
usage();
}
System.out.print(OK?"Done OK":"Failed");
System.exit(0);
}
/* give user minimal usage message */
protected static void usage()
{
System.err.println("usage: TModem -r/-s file");
// not errStream, not die(), since this is static.
System.exit(1);
}
/** If we're in a standalone app it is OK to System.exit() */
protected boolean standalone = false;
public void setStandalone(boolean is) {
standalone = is;
}
public boolean isStandalone() {
return standalone;
}
/** A flag used to communicate with inner class IOTimer */
protected boolean gotChar;
/** An inner class to provide a read timeout for alarms. */
class IOTimer extends Thread {
String message;
long milliseconds;
/** Construct an IO Timer */
IOTimer(long sec, String mesg) {
milliseconds = 1000 * sec;
message = mesg;
}
public void run() {
try {
Thread.sleep(milliseconds);
} catch (InterruptedException e) {
// can't happen
}
/** Implement the timer */
if (!gotChar) {
errStream.println("Timed out waiting for " + message);
die(1); // only abort if nothing arrived before the timeout
}
}
}
/*
* send a file to the remote
*/
public boolean send(String tfile) throws IOException, InterruptedException
{
char checksum, index, blocknumber, errorcount;
byte character;
byte[] sector = new byte[SECSIZE];
int nbytes;
DataInputStream foo;
foo = new DataInputStream(new FileInputStream(tfile));
errStream.println( "file open, ready to send");
errorcount = 0;
blocknumber = 1;
// The C version uses "alarm()", a UNIX-only system call,
// to detect if the read times out. Here we do detect it
// by using a Thread, the IOTimer class defined above.
gotChar = false;
new IOTimer(SENTIMOUT, "NAK to start send").start();
do {
character = getchar();
gotChar = true;
if (character != NAK && errorcount < MAXERRORS)
++errorcount;
} while (character != NAK && errorcount < MAXERRORS);
errStream.println( "transmission beginning");
if (errorcount == MAXERRORS) {
xerror();
}
while ((nbytes = foo.read(sector)) > 0) { // read the next sector from the file being sent
if (nbytes<SECSIZE)
sector[nbytes]=CPMEOF;
errorcount = 0;
while (errorcount < MAXERRORS) {
errStream.println( "{" + blocknumber + "} ");
putchar(SOH); /* here is our header */
putchar(blocknumber); /* the block number */
putchar(~blocknumber); /* & its complement */
checksum = 0;
for (index = 0; index < SECSIZE; index++) {
putchar(sector[index]);
checksum += sector[index];
}
putchar(checksum); /* tell our checksum */
if (getchar() != ACK)
++errorcount;
else
break;
}
if (errorcount == MAXERRORS)
xerror();
++blocknumber;
}
boolean isAck = false;
while (!isAck) {
putchar(EOT);
isAck = getchar() == ACK;
}
errStream.println( "Transmission complete.");
return true;
}
/*
* receive a file from the remote
*/
public boolean receive(String tfile) throws IOException, InterruptedException
{
char checksum, index, blocknumber, errorcount;
byte character;
byte[] sector = new byte[SECSIZE];
DataOutputStream foo;
foo = new DataOutputStream(new FileOutputStream(tfile));
System.out.println("you have " + SLEEP + " seconds...");
/* wait for the user or remote to get his act together */
gotChar = false;
new IOTimer(SLEEP, "receive from remote").start();
errStream.println("Starting receive...");
putchar(NAK);
errorcount = 0;
blocknumber = 1;
rxLoop:
do {
character = getchar();
gotChar = true;
if (character != EOT) {
try {
byte not_ch;
if (character != SOH) {
errStream.println( "Not SOH");
if (++errorcount < MAXERRORS)
continue rxLoop;
else
xerror();
}
character = getchar();
not_ch = (byte)(~getchar());
errStream.println( "[" + character + "] ");
if (character != not_ch) {
errStream.println( "Blockcounts not ~");
++errorcount;
continue rxLoop;
}
if (character != blocknumber) {
errStream.println( "Wrong blocknumber");
++errorcount;
continue rxLoop;
}
checksum = 0;
for (index = 0; index < SECSIZE; index++) {
sector[index] = getchar();
checksum += sector[index];
}
if (checksum != getchar()) {
errStream.println( "Bad checksum");
errorcount++;
continue rxLoop;
}
putchar(ACK);
blocknumber++;
try {
foo.write(sector);
} catch (IOException e) {
errStream.println("write failed, blocknumber " + blocknumber);
}
} finally {
if (errorcount != 0)
putchar(NAK);
}
}
} while (character != EOT);
foo.close();
putchar(ACK); /* tell the other end we accepted his EOT */
putchar(ACK);
putchar(ACK);
errStream.println("Receive Completed.");
return true;
}
protected byte getchar() throws IOException {
return (byte)inStream.read();
}
protected void putchar(int c) throws IOException {
outStream.write(c);
}
protected void xerror()
{
errStream.println("too many errors...aborting");
die(1);
}
protected void die(int how)
{
if (standalone)
System.exit(how);
else
System.out.println(("Error code " + how));
}
}
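TModem itself only needs an InputStream, an OutputStream, and a PrintWriter, so wiring it to a serial port is a matter of handing it the port's streams. A minimal sketch, assuming the RXTX library (gnu.io) is on the classpath; the port name, baud rate, and output file are illustrative:
import gnu.io.CommPortIdentifier;
import gnu.io.SerialPort;
import java.io.PrintWriter;

public class XmodemReceive {
    public static void main(String[] args) throws Exception {
        // Open and configure the serial port (names and settings are examples).
        CommPortIdentifier id = CommPortIdentifier.getPortIdentifier("COM1");
        SerialPort port = (SerialPort) id.open("XmodemReceive", 2000);
        port.setSerialPortParams(9600, SerialPort.DATABITS_8,
                SerialPort.STOPBITS_1, SerialPort.PARITY_NONE);
        try {
            // Hand the port's streams to TModem and receive into a local file.
            TModem tm = new TModem(port.getInputStream(), port.getOutputStream(),
                    new PrintWriter(System.err, true));
            tm.receive("received.bin");
        } finally {
            port.close();
        }
    }
}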
