First of all, if I don't use the decode_path function, my code can play a .wav file and it works fine; I use JLayer and AudioTrack to play the song.
Second, if I use decode_path, it decodes the MP3 to PCM and passes the byte[] to PlayAudioTrack to play it.
The question is, I don't know where my code is wrong. I use a 320 kbps, 44.1 kHz stereo, Layer 3 MP3, but AudioTrack plays noise instead of music!
Can anyone help?
My code:
public void PlayAudioTrack(String filePath) throws IOException{
int intSize = android.media.AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
//Reading the file..
int count = 512 * 1024; // 512 kb
// byte[] byteData = null;
// byteData = new byte[(int)count];
//we can decode correct byte data here
byte[] byteData = null;
byteData = decode_path(filePath, 0, 20000);
File file = null;
file = new File(filePath);
FileInputStream in = null;
try {
in = new FileInputStream( file );
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int bytesread = 0, ret = 0;
int size = (int) file.length();
at.play();
while (bytesread < size) {
Log.e("devon","write byte array with sizes");
ret = in.read( byteData,0, count);
if (ret != -1) {
Log.e("devon","Write the byte array to the track");
at.write(byteData,0, ret);
bytesread += ret;
}else break;
}
at.stop();
at.release();
}
public static byte[] decode_path(String path, int startMs, int maxMs)
throws IOException{
ByteArrayOutputStream outStream = new ByteArrayOutputStream(1024);
float totalMs = 0;
boolean seeking = true;
File file = new File(path);
InputStream inputStream = new BufferedInputStream(new FileInputStream(file), 8 * 1024);
try {
Bitstream bitstream = new Bitstream(inputStream);
Decoder decoder = new Decoder();
boolean done = false;
while (! done) {
Header frameHeader = bitstream.readFrame();
if (frameHeader == null) {
done = true;
} else {
totalMs += frameHeader.ms_per_frame();
if (totalMs >= startMs) {
seeking = false;
}
if (! seeking) {
SampleBuffer output = (SampleBuffer) decoder.decodeFrame(frameHeader, bitstream);
if (output.getSampleFrequency() != 44100
|| output.getChannelCount() != 2) {
throw new IllegalArgumentException("mono or non-44100 MP3 not supported");
}
short[] pcm = output.getBuffer();
for (short s : pcm) {
outStream.write(s & 0xff);
outStream.write((s >> 8 ) & 0xff);
}
}
if (totalMs >= (startMs + maxMs)) {
done = true;
}
}
bitstream.closeFrame();
}
return outStream.toByteArray();
} catch (BitstreamException e) {
throw new IOException("Bitstream error: " + e);
} catch (DecoderException e) {
Log.w(TAG, "Decoder error", e);
throw new IOException("Decoder error: " + e);
}
}
The working version below plays only the decoded PCM data:
public void PlayAudioTrack(String filePath) throws IOException{
int intSize = android.media.AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
//Reading the file..
int count = 512 * 1024; // 512 kb
// byte[] byteData = null;
// byteData = new byte[(int)count];
//we can decode correct byte data here
byte[] byteData = null;
byteData = decode_path(filePath, 0, 20000);
int temp = 0;
at.play();
while (temp < byteData.length)
{
    // clamp the final chunk so the write never runs past the end of the array
    int len = Math.min(count, byteData.length - temp);
    at.write(byteData, temp, len);
    temp += len;
}
at.stop();
at.release();
}
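For what it's worth, the likely reason the original version produced noise is that, after decoding, it read the raw MP3 file bytes back into byteData with a FileInputStream, so AudioTrack received compressed MP3 data instead of PCM. A related simplification, sketched under the assumption that you keep JLayer's SampleBuffer: AudioTrack also has a write(short[], int, int) overload, so the decoded shorts can be written directly and the manual little-endian byte packing skipped.

short[] pcm = output.getBuffer();           // JLayer's decoded 16-bit samples
at.write(pcm, 0, output.getBufferLength()); // assumes an AudioTrack 'at' is in scope at this point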
I am trying to record an audio stream via a Bluetooth device. I am using Bluetooth SCO to get the Bluetooth audio and the AudioRecord class to record it.
I am recording raw .pcm files, mono channel, at a sampling rate of 16000 Hz.
I am calculating the buffer size like this:
private static final int BUFFER_SIZE_FACTOR = 2;
private static final int BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLING_RATE_IN_HZ,CHANNEL_CONFIG, AUDIO_FORMAT) * BUFFER_SIZE_FACTOR;
This is how I am getting/writing audio currently:
private class RecordingRunnable implements Runnable {
@Override
public void run() {
setFileNameAndPath();
final ByteBuffer buffer = ByteBuffer.allocateDirect(BUFFER_SIZE);
try (final FileOutputStream outStream = new FileOutputStream(mFilePath)) {
while (recordingInProgress.get()) {
int result = recorder.read(buffer, BUFFER_SIZE);
if (result < 0) {
throw new RuntimeException("Reading of audio buffer failed: " +
getBufferReadFailureReason(result));
}
outStream.write(buffer.array(), 0, BUFFER_SIZE);
buffer.clear();
}
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException("Writing of recorded audio failed", e);
}
}
}
I did a little research and found that the clipping effect could be because of the wrong byte order (LITTLE_ENDIAN or BIG_ENDIAN) or because of poor multithreading. However, in the current implementation I am not able to understand how the bytes are being ordered and saved, and what I can do to fix the clipping/noise problem.
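One concrete thing that stands out in the loop above, offered as a guess rather than a confirmed diagnosis: it always writes BUFFER_SIZE bytes even when read() returned fewer, so stale or partial buffer contents can end up in the file and sound like noise. Writing only the bytes actually read avoids that:

int result = recorder.read(buffer, BUFFER_SIZE);
if (result > 0) {
    outStream.write(buffer.array(), 0, result); // write only what was read, not the whole buffer
    buffer.clear();
}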
I am starting my recorder runnable like this
recordingThread = new Thread(new RecordingRunnable(), "Recording Thread");
recordingThread.start();
recordingThread.setPriority(Thread.MAX_PRIORITY);
I had the same issue and I resolved it with the code below.
private byte[] short2byte(short[] sData, int size) {
int shortArrsize = size;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
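The loop above packs each 16-bit sample least-significant byte first, i.e. little-endian, which is what WAV/PCM consumers generally expect. If you prefer, the same conversion can be sketched with java.nio.ByteBuffer; the explicit ByteOrder is the important part:

private byte[] short2byte(short[] sData, int size) {
    byte[] bytes = new byte[size * 2];
    ByteBuffer.wrap(bytes)
              .order(ByteOrder.LITTLE_ENDIAN) // matches the manual LSB-first packing
              .asShortBuffer()
              .put(sData, 0, size);
    return bytes;
}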
......
int bufferSize = AudioRecord.getMinBufferSize(48000, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT);
short[] buffer = new short[bufferSize];
int source = MediaRecorder.AudioSource.VOICE_RECOGNITION;
mAudioRecorder = new AudioRecord(source, 48000,
AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
int state = mAudioRecorder.getState();
if (state != AudioRecord.STATE_INITIALIZED) {
Log.e(TAG, "Can not support");
return;
}
mAudioRecorder.startRecording();
while (mIsRecording) {
int bufferReadResult = mAudioRecorder.read(buffer, 0, bufferSize);
if (bufferReadResult < 0) {
continue;
}
try {
byte data[] = short2byte(buffer, bufferReadResult);
fos.write(data, 0, bufferReadResult * 2);
} catch (IOException e) {
e.printStackTrace();
}
}
I want to convert an audio file into a byte array. I currently did it like this, and I want to know whether it works:
private static AudioFormat getFormat() {
float sampleRate = 44100;
int sampleSizeInBits = 16;
int channels = 1;
boolean signed = true;
boolean bigEndian = true;
return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed,
bigEndian);
}
public static byte[] listenSound(File f) {
AudioInputStream din = null;
AudioInputStream outDin = null;
PCM2PCMConversionProvider conversionProvider = new PCM2PCMConversionProvider();
try {
AudioInputStream in = AudioSystem.getAudioInputStream(f);
AudioFormat baseFormat = in.getFormat();
AudioFormat decodedFormat = new AudioFormat(
AudioFormat.Encoding.PCM_SIGNED,
baseFormat.getSampleRate(),
16,
baseFormat.getChannels(),
baseFormat.getChannels() * 2,
baseFormat.getSampleRate(),
false);
din = AudioSystem.getAudioInputStream(decodedFormat, in);
if (!conversionProvider.isConversionSupported(getFormat(), decodedFormat)) {
System.out.println("Conversion Not Supported.");
System.exit(-1);
}
outDin = conversionProvider.getAudioInputStream(getFormat(), din);
ByteArrayOutputStream out = new ByteArrayOutputStream();
int n = 0;
byte[] buffer = new byte[1024];
while (true) {
n++;
if (n > 1000)
break;
int count = 0;
count = outDin.read(buffer, 0, 1024);
if (count > 0) {
out.write(buffer, 0, count);
}
}
in.close();
din.close();
outDin.close();
out.flush();
out.close();
//byte[] b=out.toByteArray();
//for(int i=0; i<b.length; i++)
//System.out.println("b = "+b[i]);
return out.toByteArray();
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
That byte array is the time-domain data; I need to be sure it is correct before transforming it into the frequency domain with a discrete Fourier transform.
Thanks for your help!
Typically the actual number of bytes processed is returned from a call like
count = outDin.read(buffer, 0, 1024);
so, in addition to your current hard break after processing 1000 chunks, if the API does in fact return a byte count, you should check for it:
int n = 0;
int size_chunk = 1024;
byte[] buffer = new byte[size_chunk];
boolean keep_streaming = true;
while (keep_streaming) {
n++;
if (n > 1000) { // troubleshooting ONLY remove later
keep_streaming = false;
}
int count = 0;
count = outDin.read(buffer, 0, size_chunk);
if (count > 0) {
out.write(buffer, 0, count);
}
if (count < size_chunk) { // input stream has been consumed
keep_streaming = false;
}
}
You did not supply a link to the API doc, so I cannot confirm; however, assuming outDin.read returns the number of bytes actually processed, the above code will correctly output only the bytes that match the input, and so will produce a smaller output if the input is less than 1 MB of data. (Your original logic blindly generated a 1 MB output, stopping only after seeing 1000 chunks. It also assumes you intend to truncate the input after 1 MB of data, as per your lines:)
if (n > 1000) { // troubleshooting ONLY remove later
keep_streaming = false;
}
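One caveat worth adding: for InputStream-style APIs, read() may legitimately return fewer bytes than requested before the end of the stream, so count < size_chunk is not a fully reliable end-of-input test. Assuming outDin follows the InputStream contract (AudioInputStream does), the canonical idiom is to loop until read() returns -1:

byte[] buffer = new byte[1024];
int count;
while ((count = outDin.read(buffer, 0, buffer.length)) != -1) {
    out.write(buffer, 0, count); // write exactly the bytes that were read
}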
I am obtaining bytes from an InputStream, but I need to modify them and save them to a WAV file.
Here is my code:
Socket sending audio obtained from the microphone:
AudioFormat adfmt = new AudioFormat(8000.0f, 8, 1, true , true);
int bufferSize = (int) adfmt.getSampleRate()* adfmt.getFrameSize();
byte[] buffer = new byte[bufferSize];
Socket clientSocketO = new Socket(...);
OutputStream output = clientSocketO.getOutputStream();
DataLine.Info dlInfo = new DataLine.Info(TargetDataLine.class, adfmt);
TargetDataLine tdLine = (TargetDataLine) AudioSystem.getLine(dlInfo);
tdLine.open(adfmt);
tdLine.start(); // start capturing
boolean bRunningO = true;
while (bRunningO) {
int count = tdLine.read(buffer, 0, buffer.length);
if (count > 0) {
byte[] outgoingBytes = Arrays.copyOf(buffer, count);
output.write(outgoingBytes);
}
}
tdLine.flush();
On the other side, the socket receiving the bytes:
AudioFormat adfmt = new AudioFormat(8000.0f, 8, 1, true , true);
int bufferSize = (int) adfmt.getSampleRate()* adfmt.getFrameSize();
byte[] buffer = new byte[bufferSize];
Socket clientSocketI = new Socket(...);
InputStream input = clientSocketI.getInputStream();
String fileName = System.getProperty("file.separator") + "SomeFile.wav"
File fileStreamedWav = new File((new File("")).getAbsolutePath() + fileName);
AudioInputStream ais;
ByteArrayInputStream bis;
DataLine.Info dlInfo = new DataLine.Info(SourceDataLine.class, adfmt);
//SourceDataLine sdLine = (SourceDataLine) AudioSystem.getLine(dlInfo);
//sdLine.open(adfmt);
//sdLine.start(); // start playback
AudioFileFormat.Type afType = AudioFileFormat.Type.WAVE;
boolean bRunningI = true;
while (bRunningI) {
try {
int read = input.read(buffer); //Socket Reading bytes
byte[] incomingBytes;
if (read > 0) {
incomingBytes = Arrays.copyOf(buffer, read);
if (incomingBytes!= null) {
//sdLine.write(incomingBytes, 0, incomingBytes.length);
//Same Size bytes, but isn't necessary submit the put Code
byte[] changedBytes = MethodChangerBytes(incomingBytes);
bis = new ByteArrayInputStream(changedBytes);
ais = new AudioInputStream(bis, adfmt,
changedBytes.length/adfmt.getFrameSize());
int W = AudioSystem.write(ais, afType, fileStreamedWav);
System.out.println("AudioSystem.write:" + W);
}
}
} catch (IOException e) {
bRunningI = false;
}
}
Here is the code that modifies the bytes; for now, assume it amplifies by two...
byte[] MethodChangerBytes(byte[] incoming) {
byte[] outgoing = new byte[incoming.length];
for (int i = 0; i < incoming.length; i ++) {
// Really is not important what happens here
double Sample = (double)(short)(((incoming[i] - 128) & 0xFF) << 8);
Sample *= 2.0;
outgoing[i] = (byte)((((int) Sample >> 8) + 128) & 0xFF);
}
return outgoing;
}
When sdLine is uncommented, I can hear all the sound transmitted.
AudioInputStream(InputStream stream, AudioFormat format, long length)
AudioSystem.write(AudioInputStream stream, AudioFileFormat.Type fileType, File out)
The problem:
This code only saves the last batch of bytes obtained from MethodChangerBytes.
Question:
How do I save all the processed WAV bytes until the socket connection is closed?
Thank you
Have a buffer:
ByteArrayOutputStream outputStream=new ByteArrayOutputStream();
Write to this buffer, then move the file writing outside the loop; when all bytes have been read, save:
boolean bRunningI = true;
try {
while (bRunningI) {
int read = input.read(buffer); //Socket Reading bytes
byte[] incomingBytes;
if (read > 0) {
incomingBytes = Arrays.copyOf(buffer, read);
if (incomingBytes!= null) {
//sdLine.write(incomingBytes, 0, incomingBytes.length);
//Same Size bytes, but isn't necessary submit the put Code
byte[] changedBytes = MethodChangerBytes(incomingBytes);
outputStream.write(changedBytes, 0, changedBytes.length);
}
}
}
byte[] allBytes = outputStream.toByteArray();
bis = new ByteArrayInputStream(allBytes);
ais = new AudioInputStream(bis, adfmt,
allBytes.length / adfmt.getFrameSize()); // changedBytes is out of scope here; use allBytes
int W = AudioSystem.write(ais, afType, fileStreamedWav);
System.out.println("AudioSystem.write:" + W);
} catch (IOException e) {
bRunningI = false;
}
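One caveat, offered as a sketch rather than a guaranteed fix: InputStream.read returns -1 (rather than throwing) when the peer closes the connection gracefully, so the loop above would spin forever in that case. A small guard ends the loop cleanly:

int read = input.read(buffer); // Socket reading bytes
if (read == -1) {
    bRunningI = false;         // peer closed the socket gracefully
} else if (read > 0) {
    // ... process incomingBytes as above ...
}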
I am recording a PCM file through AudioRecord and creating a file on the SD card. When recording is done I play the same file with AudioTrack, but it crashes; logcat says "Invalid audio buffer size". Also, my file size on the SD card is 0.0 kB, which I don't understand. Here is the code:
public void startRecord() {
File file = new File(Environment.getExternalStorageDirectory(),
"test.pcm");
int sampleFreq = 11025;
try {
file.createNewFile();
OutputStream outputStream = new FileOutputStream(file);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(
outputStream);
DataOutputStream dataOutputStream = new DataOutputStream(
bufferedOutputStream);
int minBufferSize = AudioRecord.getMinBufferSize(sampleFreq,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
short[] audioData = new short[minBufferSize];
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, sampleFreq,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
audioRecord.startRecording();
while (recording) {
int numberOfShort = audioRecord.read(audioData, 0,
minBufferSize);
for (int i = 0; i < numberOfShort; i++) {
dataOutputStream.writeShort(audioData[i]);
}
}
audioRecord.stop();
dataOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public void PLaying(int explosion) {
File file = new File(Environment.getExternalStorageDirectory(),
"test.pcm");
int shortSizeInBytes = Short.SIZE / Byte.SIZE;
int bufferSizeInBytes = (int) (file.length() / shortSizeInBytes);
short[] audioData = new short[bufferSizeInBytes];
try {
InputStream inputStream = new FileInputStream(file);
BufferedInputStream bufferedInputStream = new BufferedInputStream(
inputStream);
DataInputStream dataInputStream = new DataInputStream(
bufferedInputStream);
int i = 0;
while (dataInputStream.available() > 0) {
audioData[i] = dataInputStream.readShort();
i++;
}
dataInputStream.close();
int sampleFreq = explosion;
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
sampleFreq, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes,
AudioTrack.MODE_STREAM);
audioTrack.play();
audioTrack.write(audioData, 0, bufferSizeInBytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
Have you integrated the stop method? Please show us a little more code. The problem is, if you haven't implemented a correct stop method, it will loop forever. Have you done something like this:
yourStopButton.setOnClickListener(new OnClickListener(){
@Override
public void onClick(View view){
recording=false;
}
});
And then, don't forget to do the same in onStart and set recording to true.
This code looks dodgy
while (recording) {
int numberOfShort = audioRecord.read(audioData, 0,
minBufferSize);
for (int i = 0; i < numberOfShort; i++) {
dataOutputStream.writeShort(audioData[i]);
}
}
What is the value of recording? Where does it change? If it never changes, this will be an endless loop (unless an exception is thrown).
Divide the minimum buffer size by 2 when constructing the AudioRecord instance
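On the playback crash: the AudioTrack constructor is handed bufferSizeInBytes derived from file.length(), which is 0 here because the recording loop never exited, and a zero buffer size is invalid. A minimal sketch of the usual approach, assuming 11025 Hz mono 16-bit as in the recording code, is to size the track from getMinBufferSize instead:

int sampleFreq = 11025;
int minBufSize = AudioTrack.getMinBufferSize(sampleFreq,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleFreq,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        minBufSize, AudioTrack.MODE_STREAM); // stream mode: the buffer need not hold the whole file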
My project is 'Speech Recognition of Azeri Speech'. I have to write a program that converts WAV files to byte arrays.
How do I convert an audio file to byte[]?
Basically as described by the snippet in the first answer, but instead of the BufferedInputStream use AudioSystem.getAudioInputStream(File) to get the InputStream.
Using the audio stream obtained from AudioSystem ensures that the headers are stripped and that the input file is decoded to a byte[] representing the actual sound frames/samples, which can then be used for FFT etc.
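A minimal sketch of that suggestion, assuming a PCM-encoded WAV file (WAV_FILE is a placeholder path; exception handling omitted):

AudioInputStream ais = AudioSystem.getAudioInputStream(new File(WAV_FILE));
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] buff = new byte[1024];
int read;
while ((read = ais.read(buff)) > 0) {
    out.write(buff, 0, read); // sample data only; the header has already been consumed
}
byte[] audioBytes = out.toByteArray();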
Write the file into a ByteArrayOutputStream:
ByteArrayOutputStream out = new ByteArrayOutputStream();
BufferedInputStream in = new BufferedInputStream(new FileInputStream(WAV_FILE));
int read;
byte[] buff = new byte[1024];
while ((read = in.read(buff)) > 0)
{
out.write(buff, 0, read);
}
out.flush();
byte[] audioBytes = out.toByteArray();
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.LinkedHashMap;
import javax.sound.sampled.*;
/**
* This class reads a .wav file and converts it to a bunch of byte arrays.
*
* The info represented by these byte arrays is then printed out.
*
* An example of playing these byte arrays with the speakers is used.
*
* It also converts the byte arrays to a .wav file.
*
* An extension of this concept can record from a microphone.
* In this case, some values like sampling rate would need to be assumed.
*
* See https://ccrma.stanford.edu/courses/422/projects/WaveFormat/ for .wav file spec
*
* @author sizu
*/
public class WavFileHelper {
public static void main(String[] args) {
final String NEWLINE = "\n";
int recordingSampleRate = 22050;
short recordingBitsPerSample = 16;
short recordingNumChannels = 2;
String inputFile = "/input.wav"; // Place the wav file in the top level directory, ie S:/input.wav
String outputFile = "/output.wav";
String recordedFile = "/capture.wav";
System.out.println("START");
try {
WavData wavInputData = new WavData();
WavData wavRecordData = new WavData();
wavRecordData.put(WaveSection.SAMPLE_RATE, recordingSampleRate);
wavRecordData.put(WaveSection.BITS_PER_SAMPLE, recordingBitsPerSample);
wavRecordData.put(WaveSection.NUM_CHANNELS, recordingNumChannels);
System.out.println(NEWLINE+"CONVERT WAV FILE TO BYTE ARRAY");
wavInputData.read(inputFile);
System.out.println(NEWLINE+"CONVERT BYTE ARRAY TO WAV FILE");
wavInputData.write(outputFile);
System.out.println(NEWLINE+"DISPLAY BYTE ARRAY INFORMATION FOR INPUT FILE");
wavInputData.printByteInfo();
System.out.println(NEWLINE+"START RECORDING - You can connect the microphone to the speakers");
WavAudioRecorder recorder = new WavFileHelper.WavAudioRecorder(wavRecordData);
recorder.startRecording();
System.out.println(NEWLINE+"PLAY BYTE ARRAY (THIS WILL BE RECORDED)");
WavAudioPlayer player = new WavFileHelper.WavAudioPlayer(wavInputData);
player.playAudio();
System.out.println(NEWLINE+"STOP RECORDING FOR RECORDING");
recorder.stopRecording();
System.out.println(NEWLINE+"DISPLAY BYTE ARRAY INFORMATION");
wavRecordData.printByteInfo();
System.out.println(NEWLINE+"SAVE RECORDING IN WAV FILE");
wavRecordData.write(recordedFile);
} catch (Exception ex) {
ex.printStackTrace();
}
System.out.println("FINISH");
}
public static enum WaveSection {
// 12 Bytes
CHUNK_ID(4, ByteOrder.BIG_ENDIAN),
CHUNK_SIZE(4, ByteOrder.LITTLE_ENDIAN),
FORMAT(4, ByteOrder.BIG_ENDIAN),
// 24 Bytes
SUBCHUNK1_ID(4, ByteOrder.BIG_ENDIAN),
SUBCHUNK1_SIZE(4, ByteOrder.LITTLE_ENDIAN),
AUDIO_FORMAT(2, ByteOrder.LITTLE_ENDIAN),
NUM_CHANNELS(2, ByteOrder.LITTLE_ENDIAN),
SAMPLE_RATE(4, ByteOrder.LITTLE_ENDIAN),
BYTE_RATE(4, ByteOrder.LITTLE_ENDIAN),
BLOCK_ALIGN(2, ByteOrder.LITTLE_ENDIAN),
BITS_PER_SAMPLE(2, ByteOrder.LITTLE_ENDIAN),
// 8 Bytes
SUBCHUNK2_ID(4, ByteOrder.BIG_ENDIAN),
SUBCHUNK2_SIZE(4, ByteOrder.LITTLE_ENDIAN),
DATA(0, ByteOrder.LITTLE_ENDIAN),
;
private Integer numBytes;
private ByteOrder endian;
WaveSection(Integer numBytes, ByteOrder endian){
this.numBytes = numBytes;
this.endian = endian;
}
}
public static class WavData extends LinkedHashMap<WaveSection, byte[]>{
static int HEADER_SIZE = 44; // There are 44 bytes before the data section
static int DEFAULT_SUBCHUNK1_SIZE = 16;
static short DEFAULT_AUDIO_FORMAT = 1;
static short DEFAULT_BLOCK_ALIGN = 4;
static String DEFAULT_CHUNK_ID = "RIFF";
static String DEFAULT_FORMAT = "WAVE";
static String DEFAULT_SUBCHUNK1_ID = "fmt ";
static String DEFAULT_SUBCHUNK2_ID = "data";
public WavData(){
this.put(WaveSection.CHUNK_ID, DEFAULT_CHUNK_ID);
this.put(WaveSection.FORMAT, DEFAULT_FORMAT);
this.put(WaveSection.SUBCHUNK1_ID, DEFAULT_SUBCHUNK1_ID);
this.put(WaveSection.SUBCHUNK1_SIZE, DEFAULT_SUBCHUNK1_SIZE);
this.put(WaveSection.AUDIO_FORMAT, DEFAULT_AUDIO_FORMAT);
this.put(WaveSection.BLOCK_ALIGN, DEFAULT_BLOCK_ALIGN);
this.put(WaveSection.SUBCHUNK2_ID, DEFAULT_SUBCHUNK2_ID);
this.put(WaveSection.CHUNK_SIZE, 0);
this.put(WaveSection.SUBCHUNK2_SIZE, 0);
this.put(WaveSection.BYTE_RATE, 0);
}
public void put(WaveSection waveSection, String value){
byte[] bytes = value.getBytes();
this.put(waveSection, bytes);
}
public void put(WaveSection waveSection, int value) {
byte[] bytes = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN).putInt(value).array();
this.put(waveSection, bytes);
}
public void put(WaveSection waveSection, short value) {
byte[] bytes = ByteBuffer.allocate(2).order(ByteOrder.LITTLE_ENDIAN).putShort(value).array();
this.put(waveSection, bytes);
}
public byte[] getBytes(WaveSection waveSection) {
return this.get(waveSection);
}
public String getString(WaveSection waveSection) {
byte[] bytes = this.get(waveSection);
return new String(bytes);
}
public int getInt(WaveSection waveSection) {
byte[] bytes = this.get(waveSection);
return ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN).getInt();
}
public short getShort(WaveSection waveSection) {
byte[] bytes = this.get(waveSection);
return ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN).getShort();
}
public void printByteInfo() {
for (WaveSection waveSection : WaveSection.values()) {
if (waveSection.numBytes == 4
&& waveSection.endian == ByteOrder.BIG_ENDIAN) {
System.out.println("SECTION:" + waveSection + ":STRING:"
+ this.getString(waveSection));
} else if (waveSection.numBytes == 4
&& waveSection.endian == ByteOrder.LITTLE_ENDIAN) {
System.out.println("SECTION:" + waveSection + ":INTEGER:"
+ this.getInt(waveSection));
} else if (waveSection.numBytes == 2
&& waveSection.endian == ByteOrder.LITTLE_ENDIAN) {
System.out.println("SECTION:" + waveSection + ":SHORT:"
+ this.getShort(waveSection));
} else {
// Data Section
}
}
}
public void read(String inputPath) throws Exception {
// Analyze redundant info
int dataSize = (int) new File(inputPath).length() - HEADER_SIZE;
WaveSection.DATA.numBytes = dataSize; // Can't have two threads using this at the same time
// Read from File
DataInputStream inFile = new DataInputStream(new FileInputStream(inputPath));
for (WaveSection waveSection : WaveSection.values()) {
byte[] readBytes = new byte[waveSection.numBytes];
for (int i = 0; i < waveSection.numBytes; i++) {
readBytes[i] = inFile.readByte();
}
this.put(waveSection, readBytes);
}
inFile.close();
}
public void write(String outputPath) throws Exception {
// Analyze redundant info
int dataSize = this.get(WaveSection.DATA).length;
this.put(WaveSection.CHUNK_SIZE, dataSize+36);
this.put(WaveSection.SUBCHUNK2_SIZE, dataSize);
int byteRate = this.getInt(WaveSection.SAMPLE_RATE)*this.getShort(WaveSection.BLOCK_ALIGN);
this.put(WaveSection.BYTE_RATE, byteRate);
// Write to File
DataOutputStream dataOutputStream = new DataOutputStream(new FileOutputStream(outputPath));
for (WaveSection waveSection : WaveSection.values()) {
dataOutputStream.write(this.getBytes(waveSection));
}
dataOutputStream.close();
}
public AudioFormat createAudioFormat() {
boolean audioSignedSamples = true; // Samples are signed
boolean audioBigEndian = false;
float sampleRate = (float) this.getInt(WaveSection.SAMPLE_RATE);
int bitsPerSample = (int) this.getShort(WaveSection.BITS_PER_SAMPLE);
int numChannels = (int) this.getShort(WaveSection.NUM_CHANNELS);
return new AudioFormat(sampleRate, bitsPerSample,
numChannels, audioSignedSamples, audioBigEndian);
}
}
public static class WavAudioPlayer {
WavData waveData = new WavData();
public WavAudioPlayer(WavData waveData){
this.waveData = waveData;
}
public void playAudio() throws Exception {
byte[] data = waveData.getBytes(WaveSection.DATA);
// Create an audio input stream from byte array
AudioFormat audioFormat = waveData.createAudioFormat();
InputStream byteArrayInputStream = new ByteArrayInputStream(data);
AudioInputStream audioInputStream = new AudioInputStream(byteArrayInputStream,
audioFormat, data.length / audioFormat.getFrameSize());
// Write audio input stream to speaker source data line
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class,
audioFormat);
SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
sourceDataLine.open(audioFormat);
sourceDataLine.start();
// Loop through input stream to write to source data line
byte[] tempBuffer = new byte[10000];
int cnt;
while ((cnt = audioInputStream.read(tempBuffer, 0, tempBuffer.length)) != -1) {
sourceDataLine.write(tempBuffer, 0, cnt);
}
// Cleanup
sourceDataLine.drain();
sourceDataLine.close();
byteArrayInputStream.close();
}
}
public static class WavAudioRecorder implements Runnable {
WavData waveData = new WavData();
boolean recording = true;
Thread runningThread;
ByteArrayOutputStream byteArrayOutputStream;
public WavAudioRecorder(WavData waveData){
this.waveData = waveData;
}
public void startRecording(){
this.recording = true;
this.runningThread = new Thread(this);
runningThread.start();
}
public WavData stopRecording() throws Exception{
this.recording = false;
runningThread.join(); // wait for the capture loop to exit; Thread.stop() is deprecated and unsafe
waveData.put(WaveSection.DATA, byteArrayOutputStream.toByteArray());
return waveData;
}
public void run() {
try {
// Create an audio output stream for byte array
byteArrayOutputStream = new ByteArrayOutputStream();
// Write audio input stream to speaker source data line
AudioFormat audioFormat = waveData.createAudioFormat();
DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat);
TargetDataLine targetDataLine = (TargetDataLine) AudioSystem.getLine(info);
targetDataLine.open(audioFormat);
targetDataLine.start();
// Loop through target data line to write to output stream
int numBytesRead;
byte[] data = new byte[targetDataLine.getBufferSize() / 5];
while(recording) {
numBytesRead = targetDataLine.read(data, 0, data.length);
byteArrayOutputStream.write(data, 0, numBytesRead);
}
// Cleanup
targetDataLine.stop();
targetDataLine.close();
byteArrayOutputStream.close();
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
}
Convert file to byte array
fileToByteArray("C:\..\my.mp3");
public static byte[] fileToByteArray(String name){
Path path = Paths.get(name);
try {
return Files.readAllBytes(path);
} catch (IOException e) {
e.printStackTrace();
return null;
}
}