Audio Line is always unavailable - java

I have a problem with my code: it has suddenly started throwing an error that it wasn't throwing about 24 hours ago. No matter which input device I set as the default in Windows, I always get the following error:
javax.sound.sampled.LineUnavailableException: line with format PCM_SIGNED 8000.0 Hz, 8 bit, stereo, 2 bytes/frame not supported.
    at java.desktop/com.sun.media.sound.DirectAudioDevice$DirectDL.implOpen(DirectAudioDevice.java:484)
    at java.desktop/com.sun.media.sound.AbstractDataLine.open(AbstractDataLine.java:115)
    at java.desktop/com.sun.media.sound.AbstractDataLine.open(AbstractDataLine.java:142)
Code for the recorder class:
import javax.sound.sampled.*;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;

public class SoundRecorder {
    // record duration, in milliseconds
    static final long RECORD_TIME = 60000; // 1 minute
    // path of the wav file
    File wavFile = new File("Audio//RecordAudio.wav");
    // format of audio file
    AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
    // the line from which audio data is captured
    TargetDataLine line;

    /**
     * Defines an audio format
     */
    AudioFormat getAudioFormat() {
        float sampleRate = 8000;
        int sampleSizeInBits = 8;
        int channels = 2;
        boolean signed = true;
        boolean bigEndian = true;
        AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits,
                channels, signed, bigEndian);
        return format;
    }

    /**
     * Captures the sound and records it into a WAV file
     */
    void start() {
        try {
            AudioFormat format = getAudioFormat();
            DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
            System.out.println(format.properties());
            // checks if system supports the data line
            if (!AudioSystem.isLineSupported(info)) {
                System.out.println(Arrays.toString(AudioSystem.getAudioFileTypes()));
                //System.out.println(line.getLineInfo());
                System.out.println("Line not supported");
                System.exit(0);
            }
            line = (TargetDataLine) AudioSystem.getLine(info);
            System.out.println(format.properties());
            //System.out.println(line.getLineInfo());
            line.open(format); // <-- the exception is thrown here
            System.out.println(format.properties());
            System.out.println(line.getLineInfo());
            line.start(); // start capturing
            System.out.println("Start capturing...");
            AudioInputStream ais = new AudioInputStream(line);
            System.out.println("Start recording...");
            // start recording
            AudioSystem.write(ais, fileType, wavFile);
        } catch (LineUnavailableException ex) {
            ex.printStackTrace();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
    }

    /**
     * Closes the target data line to finish capturing and recording
     */
    void finish() {
        line.stop();
        line.close();
        System.out.println("Finished");
    }
}
Main:
public static void main(String[] args) throws InterruptedException {
    SoundRecorder audio = new SoundRecorder();
    long recordTime = 60000L;
    Thread stopper = new Thread(() -> {
        try {
            Thread.sleep(recordTime);
        } catch (InterruptedException var4) {
            var4.printStackTrace();
        }
        audio.finish();
    });
    stopper.start();
    System.out.println("stopper started");
    audio.start();
    System.out.println("audio started");
    stopper.join();
}
I've tried restarting my computer and changing the encoding to signed, unsigned, float, a-law, and u-law. I've also tried changing the sample rate, sample size, and endianness, as well as switching the default device from the audio mixer to the microphone.
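For reference, a small diagnostic that is not part of the original post but can help narrow this down: ask Java Sound which capture formats each installed mixer actually advertises. If no mixer lists anything close to PCM_SIGNED 8000.0 Hz, 8 bit, stereo, then line.open(format) will fail no matter which device is the Windows default. The class name ListCaptureFormats is just illustrative.

import javax.sound.sampled.*;

public class ListCaptureFormats {
    public static void main(String[] args) {
        for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
            Mixer mixer = AudioSystem.getMixer(mixerInfo);
            Line.Info[] targetLines = mixer.getTargetLineInfo();
            if (targetLines.length == 0) {
                continue; // this mixer offers no capture lines
            }
            System.out.println("Mixer: " + mixerInfo.getName());
            for (Line.Info lineInfo : targetLines) {
                if (lineInfo instanceof DataLine.Info) {
                    // print every AudioFormat this capture line reports as supported
                    for (AudioFormat fmt : ((DataLine.Info) lineInfo).getFormats()) {
                        System.out.println("    " + fmt);
                    }
                }
            }
        }
    }
}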

Related

Recording speaker output using Java

I want to ask the oft-repeated question of how to record the audio sent to the speakers, but I would like some insight into the previously given answers.
I went to this page: Capturing speaker output in Java
I saw this code posted by a developer:
import javax.sound.sampled.*;
import java.io.*;
public class JavaSoundRecorder {
// record duration, in milliseconds
static final long RECORD_TIME = 10000; // 10 seconds
// path of the wav file
File wavFile = new File("E:/RecordAudio.wav");
// format of audio file
AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
// the line from which audio data is captured
TargetDataLine line;
/**
* Defines an audio format
*/
AudioFormat getAudioFormat() {
float sampleRate = 16000;
int sampleSizeInBits = 8;
int channels = 1;
boolean signed = true;
boolean bigEndian = true;
AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits,
channels, signed, bigEndian);
return format;
}
/**
* Captures the sound and record into a WAV file
*/
void start() {
try {
AudioFormat format = getAudioFormat();
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
// checks if system supports the data line
if (!AudioSystem.isLineSupported(info)) {
System.out.println("Line not supported");
System.exit(0);
}
line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format);
line.start(); // start capturing
System.out.println("Start capturing...");
AudioInputStream ais = new AudioInputStream(line);
System.out.println("Start recording...");
// start recording
AudioSystem.write(ais, fileType, wavFile);
} catch (LineUnavailableException ex) {
ex.printStackTrace();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
/**
* Closes the target data line to finish capturing and recording
*/
void finish() {
line.stop();
line.close();
System.out.println("Finished");
}
/**
* Entry to run the program
*/
public static void main(String[] args) {
final JavaSoundRecorder recorder = new JavaSoundRecorder();
// creates a new thread that waits for a specified
// amount of time before stopping
Thread stopper = new Thread(new Runnable() {
public void run() {
try {
Thread.sleep(RECORD_TIME);
} catch (InterruptedException ex) {
ex.printStackTrace();
}
recorder.finish();
}
});
stopper.start();
// start recording
recorder.start();
}
}
Now I have some questions I want to ask.
This code runs fine on my Windows OS, but it doesn't work on my Ubuntu install on the same machine (dual boot). On Ubuntu it records silence; I tried enumerating all the mixers but couldn't get it working.
I want to capture the output going to the speakers, but what I actually get is the output of the speakers: the sound of my surroundings, with only a faint trace of what I actually want.
Please address the two questions above.
What do I want? I want the clean audio that is currently being played and sent to my laptop's speakers, not audio that has already been emitted and then re-recorded by the microphone, because that sounds bad. I also need a reason why this code doesn't work on Ubuntu. (This may be vague, but I am using BlueJ on Windows to run this and NetBeans on Ubuntu, without sudo.)
I saw some YouTube videos to understand the theory:
https://www.youtube.com/watch?v=GVtl19L9GxU
https://www.youtube.com/watch?v=PTs01qr9RlY
I read about a page and a half of the Oracle documentation here: https://docs.oracle.com/javase/tutorial/sound/accessing.html
There was this thing mentioned in the docs:
An applet running with the applet security manager can play, but not record, audio.
An application running with no security manager can both play and record audio.
An application running with the default security manager can play, but not record, audio.
But I don't think I enabled any security manager.
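A quick way to confirm that, added here for reference rather than taken from the original post: print the installed security manager somewhere in main. It prints null when no security manager is installed, which means recording is not being blocked by one.

System.out.println(System.getSecurityManager()); // null means no security manager is installed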
In the end I had no success with what I want to do. Instead of going further into the documentation, I thought I'd ask the question here.
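One note, added for context rather than taken from the post: Java Sound can only capture what some mixer exposes as a capture device, so recording the speaker output needs a loopback device, typically a PulseAudio "Monitor of ..." source on Linux or a "Stereo Mix" device on some Windows drivers, and the TargetDataLine has to be opened from that specific mixer instead of the default one. Whether such a device exists depends entirely on the OS and driver; the helper below is only a sketch under that assumption, and the class and method names are illustrative.

import javax.sound.sampled.*;

public class LoopbackCaptureSketch {
    // Tries to open a capture line on a mixer whose name contains the given keyword,
    // e.g. "Monitor" for a PulseAudio monitor source or "Stereo Mix" on some Windows drivers.
    static TargetDataLine openCaptureLine(String mixerKeyword, AudioFormat format)
            throws LineUnavailableException {
        DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
        for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
            if (!mixerInfo.getName().toLowerCase().contains(mixerKeyword.toLowerCase())) {
                continue;
            }
            Mixer mixer = AudioSystem.getMixer(mixerInfo);
            if (mixer.isLineSupported(info)) {
                TargetDataLine line = (TargetDataLine) mixer.getLine(info);
                line.open(format);
                return line;
            }
        }
        throw new LineUnavailableException("No capture mixer matching \"" + mixerKeyword + "\"");
    }
}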

Playing a WAV file over a GSM modem

I want to play a WAV file over a GSM modem. Here is my sample code:
private final int BUFFER_SIZE = 8;
private File soundFile;
private AudioInputStream audioStream;
private AudioFormat audioFormat;

public void playSound(String filename) throws IOException {
    String strFilename = filename;
    try {
        soundFile = new File(strFilename);
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }
    try {
        audioStream = AudioSystem.getAudioInputStream(soundFile);
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }
    audioFormat = audioStream.getFormat();
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    int nBytesRead = 0;
    byte[] abData = new byte[BUFFER_SIZE];
    while (nBytesRead != -1) {
        try {
            nBytesRead = audioStream.read(abData);
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (nBytesRead >= 0) {
            // outputStream is the serial port's OutputStream (its setup is not shown in this snippet)
            outputStream.write(abData, 0, nBytesRead);
            outputStream.flush();
        }
    }
}
The problem is that the WAV file sent through the serial port plays back very fast, and I don't know why. Here is my WAV file's description:
ULAW 8000.0 Hz, 8 bit, mono, 1 byte/frame, audio sample rate 8 kHz.
Can anyone help me solve this issue?
I would check the following - especially #3.
Synchronization
AudioFormat
JSSRC Resampling Library
How about sleeping for a short while after "outputStream.flush();"? Something like Thread.sleep(50) might do.
I think your problem is at the receiving end, where playback happens. Set the audio sample rate there to match your audio data. Also make sure serial-port flow control is enabled (or look into it if you do get the correct playback speed but parts of the audio are lost). You know the sample rate of the file, so set the receiving end to the same sample rate (and other parameters).
If the receiving end is out of reach or can't be changed, you need to change the sample rate at the transmitting end to match what the receiver expects. The easiest way is to use an audio editor (such as SoX, a command-line tool) to convert the audio file. Try this first, just to check that you can get good playback with the right audio format.
A more flexible approach is to do the conversion in your program, so you can feed it any audio file and it will convert to the correct sample rate and play correctly. That is, of course, more complex. Look for a library, such as the one recommended in the other answer by Elliott Frisch.
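To make the pacing idea concrete, here is a sketch that is not from the original answers: a ULAW 8000 Hz, 1 byte/frame stream amounts to 8000 bytes per second, so if the link has no flow control the sender has to throttle itself to roughly that byte rate. The class name PacedAudioSender is illustrative, and out is assumed to be the modem's serial OutputStream.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import java.io.IOException;
import java.io.OutputStream;

public class PacedAudioSender {
    // Writes the audio stream to 'out', throttled so bytes leave at roughly the
    // audio byte rate instead of as fast as the serial buffer will accept them.
    static void sendPaced(AudioInputStream audioStream, OutputStream out)
            throws IOException, InterruptedException {
        AudioFormat format = audioStream.getFormat();
        double bytesPerMilli = format.getFrameSize() * format.getFrameRate() / 1000.0; // ~8 for 8 kHz ULAW mono

        byte[] buffer = new byte[320]; // roughly 40 ms of audio at 8000 bytes/s
        int read;
        while ((read = audioStream.read(buffer)) != -1) {
            long start = System.nanoTime();
            out.write(buffer, 0, read);
            out.flush();
            // sleep off whatever is left of this chunk's real-time duration
            long chunkMillis = (long) (read / bytesPerMilli);
            long elapsedMillis = (System.nanoTime() - start) / 1_000_000;
            long remaining = chunkMillis - elapsedMillis;
            if (remaining > 0) {
                Thread.sleep(remaining);
            }
        }
    }
}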

Java audio Stream Closed error

I am trying to add sound to a game I am making, but every time I try to load the sound, I get a Stream Closed Exception. I don't understand why this is happening.
Loads the sound:
public class WavPlayer extends Thread {
/**
* @param s The path of the wav file.
* @return The sound data loaded into the WavSound object
*/
public static WavSound loadSound(String s){
// Get an input stream
InputStream is = WavPlayer.class.getClassLoader().getResourceAsStream(s);
AudioInputStream audioStream;
try {
// Buffer the input stream
BufferedInputStream bis = new BufferedInputStream(is);
// Create the audio input stream and audio format
audioStream = AudioSystem.getAudioInputStream(bis); //!Stream Closed Exception occurs here
AudioFormat format = audioStream.getFormat();
// The length of the audio file
int length = (int) (audioStream.getFrameLength() * format.getFrameSize());
// The array to store the samples in
byte[] samples = new byte[length];
// Read the samples into array to reduce disk access
// (fast-execution)
DataInputStream dis = new DataInputStream(audioStream);
dis.readFully(samples);
// Create a sound container
WavSound sound = new WavSound(samples, format, (int) audioStream.getFrameLength());
// Don't start the sound on load
sound.setState(SoundState.STATE_STOPPED);
// Create a new player for each sound
new WavPlayer(sound);
return sound;
} catch (Exception e) {
// An error. Mustn't happen
}
return null;
}
// Private variables
private WavSound sound = null;
/**
* Constructs a new player with a sound and with an optional looping
*
* @param s The WavSound object
*/
public WavPlayer(WavSound s) {
sound = s;
start();
}
/**
* Runs the player in a separate thread
*/
@Override
public void run(){
// Get the byte samples from the container
byte[] data = sound.getData();
InputStream is = new ByteArrayInputStream(data);
try {
// Create a line for the required audio format
SourceDataLine line = null;
AudioFormat format = sound.getAudioFormat();
// Calculate the buffer size and create the buffer
int bufferSize = sound.getLength();
// System.out.println(bufferSize);
byte[] buffer = new byte[bufferSize];
// Create a new data line to write the samples onto
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
line = (SourceDataLine) AudioSystem.getLine(info);
// Open and start playing on the line
try {
if (!line.isOpen()) {
line.open();
}
line.start();
} catch (Exception e){}
// The total bytes read
int numBytesRead = 0;
boolean running = true;
while (running) {
// Destroy this player if the sound is destroyed
if (sound.getState() == SoundState.STATE_DESTROYED) {
running = false;
// Release the line and release any resources used
line.drain();
line.close();
}
// Write the data only if the sound is playing or looping
if ((sound.getState() == SoundState.STATE_PLAYING)
|| (sound.getState() == SoundState.STATE_LOOPING)) {
numBytesRead = is.read(buffer, 0, buffer.length);
if (numBytesRead != -1) {
line.write(buffer, 0, numBytesRead);
} else {
// The samples are ended. So reset the position of the
// stream
is.reset();
// If the sound is not looping, stop it
if (sound.getState() == SoundState.STATE_PLAYING) {
sound.setState(SoundState.STATE_STOPPED);
}
}
} else {
// Not playing. so wait for a few moments
Thread.sleep(Math.min(1000 / Global.FRAMES_PER_SECOND, 10));
}
}
} catch (Exception e) {
// Do nothing
}
}
The error message I get is: "Exception in thread "main" java.io.IOException: Stream closed
at java.io.BufferedInputStream.getInIfOpen(BufferedInputStream.java:134)
at java.io.BufferedInputStream.fill(BufferedInputStream.java:218)
at java.io.BufferedInputStream.read(BufferedInputStream.java:237)
at java.io.DataInputStream.readInt(DataInputStream.java:370)
at com.sun.media.sound.WaveFileReader.getFMT(WaveFileReader.java:224)
at com.sun.media.sound.WaveFileReader.getAudioInputStream(WaveFileReader.java:140)
at javax.sound.sampled.AudioSystem.getAudioInputStream(AudioSystem.java:1094)
at stm.sounds.WavPlayer.loadSound(WavPlayer.java:42)
at stm.STM.<init>(STM.java:265)
at stm.STM.main(STM.java:363)"
Most probably the file path in this line is not correct:
WavPlayer sound1 = WavPlayer.loadSound("coin.wav");
You should pass the path of the 'coin.wav' file instead of just its name.
For instance, if it's under a folder named sounds, say right under the root of the project, that parameter should be 'sounds/coin.wav'.
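One detail the stack trace hints at, noted here as an addition rather than as part of the original answer: getClassLoader().getResourceAsStream(...) returns null instead of throwing when the resource is not on the classpath, and that null InputStream is what later surfaces as the misleading "Stream closed" IOException inside BufferedInputStream. A quick check like the following, with a hypothetical 'sounds/coin.wav' path, makes the real failure visible:

import java.io.InputStream;

public class ResourceCheck {
    public static void main(String[] args) {
        String path = "sounds/coin.wav"; // hypothetical classpath-relative path
        InputStream is = ResourceCheck.class.getClassLoader().getResourceAsStream(path);
        if (is == null) {
            System.err.println("Resource not found on classpath: " + path);
        } else {
            System.out.println("Found resource: " + path);
        }
    }
}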
The problem is in your static method loadSound. The method returns null when an exception is thrown: you catch the exception but do nothing with it.
NEVER use an empty catch block.
Catch specific exceptions.
I would change the signature of loadSound to:
public static WavSound loadSound(String s) throws Exception // or, rather than Exception, a specific exception!!
and then write the method body without the try-catch, as sketched below.
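As a sketch of what that could look like, reusing WavSound, SoundState and WavPlayer from the question (so this is illustrative rather than standalone code):

public static WavSound loadSound(String s) throws IOException, UnsupportedAudioFileException {
    InputStream is = WavPlayer.class.getClassLoader().getResourceAsStream(s);
    if (is == null) {
        throw new IOException("Resource not found on classpath: " + s);
    }
    AudioInputStream audioStream = AudioSystem.getAudioInputStream(new BufferedInputStream(is));
    AudioFormat format = audioStream.getFormat();
    int length = (int) (audioStream.getFrameLength() * format.getFrameSize());
    byte[] samples = new byte[length];
    new DataInputStream(audioStream).readFully(samples);

    WavSound sound = new WavSound(samples, format, (int) audioStream.getFrameLength());
    sound.setState(SoundState.STATE_STOPPED);
    new WavPlayer(sound);
    return sound;
}

The caller then decides how to handle or report the failure instead of silently receiving null.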

Play audio in multiple outputs

I need to play music on different audio outputs. For instance, I have two tracks, music1 and music2, and they have to play in separate threads on different speakers, assuming that I have more than one audio device able to play sound.
I found this method (here - it is from the BasicPlayer):
protected void createLine() throws LineUnavailableException
{
log.info("Create Line");
if (m_line == null)
{
AudioFormat sourceFormat = m_audioInputStream.getFormat();
log.info("Create Line : Source format : " + sourceFormat.toString());
int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
if (nSampleSizeInBits <= 0) nSampleSizeInBits = 16;
if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW) || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW)) nSampleSizeInBits = 16;
if (nSampleSizeInBits != 8) nSampleSizeInBits = 16;
AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sourceFormat.getSampleRate(), nSampleSizeInBits, sourceFormat.getChannels(), sourceFormat.getChannels() * (nSampleSizeInBits / 8), sourceFormat.getSampleRate(), false);
log.info("Create Line : Target format: " + targetFormat);
// Keep a reference on encoded stream to progress notification.
m_encodedaudioInputStream = m_audioInputStream;
try
{
// Get total length in bytes of the encoded stream.
encodedLength = m_encodedaudioInputStream.available();
}
catch (IOException e)
{
log.error("Cannot get m_encodedaudioInputStream.available()", e);
}
// Create decoded stream.
m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
AudioFormat audioFormat = m_audioInputStream.getFormat();
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
Mixer mixer = getMixer(m_mixerName);
if (mixer != null)
{
log.info("Mixer : "+mixer.getMixerInfo().toString());
m_line = (SourceDataLine) mixer.getLine(info);
}
else
{
m_line = (SourceDataLine) AudioSystem.getLine(info);
m_mixerName = null;
}
log.info("Line : " + m_line.toString());
log.debug("Line Info : " + m_line.getLineInfo().toString());
log.debug("Line AudioFormat: " + m_line.getFormat().toString());
}
}
With a little debugging, I've found out that the mixer is always null. Why is that?
Shouldn't the mixer be the device that outputs sound through a target line?
This program always plays back on the default device set on my computer; what can I do to change that?
I've actually just started working with the Java Sound API for one of my own projects, but from what I understand, Mixer is just an interface, not a concrete object. That may explain part of your problem.
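As a supplement that is not part of the original answer: getMixer(m_mixerName) in BasicPlayer presumably returns null when no installed mixer matches that name, so playback falls through to AudioSystem.getLine(info) and therefore to the default device. Picking a specific output device by name and getting a SourceDataLine from it can look like the sketch below (the class PickOutputDevice and the name-matching approach are illustrative; device names vary by OS and driver). Two lines opened this way, one per device, can then be fed from two threads to play music1 and music2 on different speakers.

import javax.sound.sampled.*;

public class PickOutputDevice {
    // Returns an open SourceDataLine from the first mixer whose name contains 'nameFragment',
    // or from the default device if no matching mixer supports the format.
    static SourceDataLine openOutputLine(String nameFragment, AudioFormat format)
            throws LineUnavailableException {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
            if (mixerInfo.getName().contains(nameFragment)) {
                Mixer mixer = AudioSystem.getMixer(mixerInfo);
                if (mixer.isLineSupported(info)) {
                    SourceDataLine line = (SourceDataLine) mixer.getLine(info);
                    line.open(format);
                    return line; // audio written to this line goes to the chosen device
                }
            }
        }
        // fall back to the system default output
        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
        return line;
    }
}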

Capturing speaker output in Java

Using Java, is it possible to capture the speaker output? This output is not being generated by my program but rather by other running applications. Can this be done with Java, or will I need to resort to C/C++?
I had a Java-based app that used Java Sound to tap into the sound flowing through the system to make a trace of it. It worked well on my own (Windows-based) machine, but failed completely on some others.
It was determined that getting it working on those machines would take nothing short of an audio loop-back in either software or hardware (e.g. connecting a lead from the speaker 'out' jack to the microphone 'in' jack).
Since all I really wanted to do was plot the trace for music, and I figured out how to play the target format (MP3) in Java, it became unnecessary to pursue the other option further.
(I also heard that Java Sound on Mac was horribly broken, but I never looked into it closely.)
Java is not the best tool for dealing with the OS. If you need or want to use it for this task, you will probably end up using the Java Native Interface (JNI), linking to libraries compiled in other languages (probably C/C++).
Take an AUX cable, connect one end to the HEADPHONE JACK and the other end to the MICROPHONE JACK, and run this code:
https://www.codejava.net/coding/capture-and-record-sound-into-wav-file-with-java-sound-api
import javax.sound.sampled.*;
import java.io.*;
public class JavaSoundRecorder {
// record duration, in milliseconds
static final long RECORD_TIME = 60000; // 1 minute
// path of the wav file
File wavFile = new File("E:/Test/RecordAudio.wav");
// format of audio file
AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
// the line from which audio data is captured
TargetDataLine line;
/**
* Defines an audio format
*/
AudioFormat getAudioFormat() {
float sampleRate = 16000;
int sampleSizeInBits = 8;
int channels = 2;
boolean signed = true;
boolean bigEndian = true;
AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits,
channels, signed, bigEndian);
return format;
}
/**
* Captures the sound and record into a WAV file
*/
void start() {
try {
AudioFormat format = getAudioFormat();
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
// checks if system supports the data line
if (!AudioSystem.isLineSupported(info)) {
System.out.println("Line not supported");
System.exit(0);
}
line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format);
line.start(); // start capturing
System.out.println("Start capturing...");
AudioInputStream ais = new AudioInputStream(line);
System.out.println("Start recording...");
// start recording
AudioSystem.write(ais, fileType, wavFile);
} catch (LineUnavailableException ex) {
ex.printStackTrace();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
/**
* Closes the target data line to finish capturing and recording
*/
void finish() {
line.stop();
line.close();
System.out.println("Finished");
}
/**
* Entry to run the program
*/
public static void main(String[] args) {
final JavaSoundRecorder recorder = new JavaSoundRecorder();
// creates a new thread that waits for a specified
// amount of time before stopping
Thread stopper = new Thread(new Runnable() {
public void run() {
try {
Thread.sleep(RECORD_TIME);
} catch (InterruptedException ex) {
ex.printStackTrace();
}
recorder.finish();
}
});
stopper.start();
// start recording
recorder.start();
}
}
