Android: adding AAC ADTS to MediaRecorder PCM - Java

My MediaRecorder gives me a PCM file as output when I record the phone's microphone. When I try to listen to the file it created, all I hear is static. If I have understood correctly, I get a PCM file from MediaRecorder, not AAC, and I need to add an ADTS header to the PCM to be able to listen to it.
I have seen threads with custom encoders, but I cannot figure out where and how I need to use them.
I create the output file from the microphone recording like this:
private static final int CHANNEL = AudioFormat.CHANNEL_IN_MONO;
private static final int AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int SAMPLE_RATE = 44100; //44.1kHz
private static final int BUFFER_SIZE = 2048;
public Status status = Status.IDLE;
private AudioRecordingHandler arh;
private File outputFile;
private Context context;
/**
* Starts script for running. Needs output file to work!
*/
public void start() {
if (outputFile == null) { return; }
System.out.println("Start reading stream...");
aacEncoder = new AACEncoder(SAMPLE_RATE, micOutputPCM);
new Thread(new Runnable() {
@Override
public void run() {
record.startRecording();
byte[] data = new byte[BUFFER_SIZE];
float[] audioFloatBuffer = new float[BUFFER_SIZE/2];
Yin y = new Yin(SAMPLE_RATE, BUFFER_SIZE/2);
while(status == Status.RECORDING) {
record.read(data, 0, BUFFER_SIZE);
audioFloatBuffer = ConverterUtil.toFloatArray(data, 0, audioFloatBuffer,
0, audioFloatBuffer.length);
PitchDetectionResult pdr = y.getPitch(audioFloatBuffer);
aacEncoder.writeIntoOutputfile(data);
arh.handlePitch(pdr.getPitch());
}
aacEncoder.stopEncoding();
}
}).start();
}
/**
* Stops script
*/
public void stop() {
status = Status.IDLE;
record.stop();
arh.finishedRecording(micOutputPCM);
}
Here is how I get the byte[] from the file and where I try to add the ADTS header to it:
public static File addHeaderToAac(File micOutputPCM, File output) throws IOException {
byte[] pcmFile = fullyReadFileToBytes(micOutputPCM);
int bufferSize = 2048;
//addADTSHeader to byte[] and return a File object
return fileWithADTSHeader;
}
public static byte[] fullyReadFileToBytes(File f) throws IOException {
int size = (int) f.length();
byte bytes[] = new byte[size];
byte tmpBuff[] = new byte[size];
FileInputStream fis = new FileInputStream(f);
try {
int read = fis.read(bytes, 0, size);
if (read < size) {
int remain = size - read;
while (remain > 0) {
read = fis.read(tmpBuff, 0, remain);
System.arraycopy(tmpBuff, 0, bytes, size - remain, read);
remain -= read;
}
}
} catch (IOException e){
throw e;
} finally {
fis.close();
}
return bytes;
}
My question is: does anyone have an encoder that can accept a File, byte[] or ByteStream as input and return a File?
Ultimately, I want to create an mp4parser AACTrackImpl, which can be found here: https://github.com/sannies/mp4parser
AACTrackImpl aacTrack2 = new AACTrackImpl(new FileDataSourceImpl(micOutputPCM));
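For context, the muxing step I am aiming for after the encoding would look roughly like this (my own sketch; encodedAacFile and outputDir are placeholders, and I am assuming mp4parser's AACTrackImpl expects an AAC stream with ADTS headers):
// Sketch: wrap an AAC file (with ADTS headers) into an MP4/M4A container using mp4parser.
AACTrackImpl aacTrack = new AACTrackImpl(new FileDataSourceImpl(encodedAacFile)); // encodedAacFile: the AAC output of the encoder
Movie movie = new Movie();
movie.addTrack(aacTrack);
Container mp4 = new DefaultMp4Builder().build(movie);
FileOutputStream fos = new FileOutputStream(new File(outputDir, "recording.m4a")); // outputDir: wherever the app writes files
mp4.writeContainer(fos.getChannel());
fos.close();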
Also, if I am missing some important details about how to convert the recording and what I should do to be able to play it, that information would be useful as well.
If I need to provide more information in order to answer this question, I will gladly do so.
Edit:
I've been trying to make an encoder that would do what I need, but so far I have had no success.
public static File addHeaderToAac(File pcmFile1, File output, Context context) throws IOException {
byte[] pcmFile = fullyReadFileToBytes(pcmFile1);
int bufferSize = 2048;
AACEncoder encoder = new AACEncoder(44100, output);
encoder.encodeAudioFrameToAAC(pcmFile);
return output;
}
I am trying to encode the PCM to AAC with this encoder, but the encoder writes the output file to memory, while I need an object back. And when I give it my byte[], it also gives me an error:
W/System.err: at java.nio.ByteBuffer.put(ByteBuffer.java:642)
The error is coming from this line:
inputBuf.put(frameData);
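I assume the put() fails because frameData can be larger than the dequeued input buffer, so I have also been experimenting with feeding it in chunks, roughly like this (just a sketch reusing the fields of the encoder class below; computePtsUsec() stands in for whatever timestamp calculation ends up being correct):
// Sketch: feed frameData to the codec in pieces no larger than each input buffer,
// instead of a single oversized put() that overflows the ByteBuffer.
int offset = 0;
while (offset < frameData.length) {
    int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
    if (inputBufIndex < 0) {
        continue; // no input buffer free yet; try again
    }
    ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex];
    inputBuf.clear();
    int chunk = Math.min(inputBuf.remaining(), frameData.length - offset);
    inputBuf.put(frameData, offset, chunk); // never exceeds the buffer's capacity
    encoder.queueInputBuffer(inputBufIndex, 0, chunk, computePtsUsec(), 0); // computePtsUsec(): placeholder timestamp helper
    offset += chunk;
    tryEncodeOutputBuffer(); // drain encoded output as input is consumed
}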
Finally, my encoder:
public class AACEncoder {
final String TAG = "UEncoder Processor";
final int sampleRate;
File outputFile;
FileOutputStream fos;
final int TIMEOUT_USEC = 10000;
MediaCodec encoder;
boolean isEncoderRunning = false;
boolean outputDone = false;
MediaCodec.BufferInfo info;
public AACEncoder(final int sampleRate, File outputFile) {
this.sampleRate = sampleRate;
this.info = new MediaCodec.BufferInfo();
this.outputFile = outputFile;
openFileStream();
initEncoder();
}
/**
* Initializes CrappyEncoder for AAC-LC (Low complexity)
* @throws Exception
*/
public void initEncoder() {
try {
encoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sampleRate);
format.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (IOException ex) {
Log.e(TAG, "Failed to create CrappyEncoder");
ex.printStackTrace();
}
}
int generateIndex = 0;
public void encodeAudioFrameToAAC(byte[] frameData) {
if (encoder == null) return;
if (!isEncoderRunning) {
encoder.start();
isEncoderRunning = true;
}
ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
if (fos != null) {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
long ptsUsec = (System.currentTimeMillis() * 1000) / 10000;
if (outputDone) {
encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex];
inputBuf.clear();
inputBuf.put(frameData);
encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0);
}
generateIndex++;
}
tryEncodeOutputBuffer();
}
checkIfOutputDone();
}
/**
* Gets data from output buffer and encodes it to
* AAC-LC encoding with ADTS header attached before every frame
*/
private void tryEncodeOutputBuffer() {
ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
//If >= 0 then valid response
int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (encoderStatus >= 0) {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
encodedData.position(info.offset);
encodedData.limit(info.offset + info.size + 7);
byte[] data = new byte[info.size + 7];
addADTStoPacket(data, info.size + 7);
encodedData.get(data, 7, info.size);
encodedData.position(info.offset);
writeIntoOutputfile(data);
encoder.releaseOutputBuffer(encoderStatus, false);
}
}
private void checkIfOutputDone() {
if (outputDone) {
if (fos != null) {
try {
fos.close();
} catch (IOException ioe) {
Log.w(TAG, "failed closing debug file");
throw new RuntimeException(ioe);
}
fos = null;
}
}
}
/**
* Add ADTS header at the beginning of each and every AAC packet.
* This is needed as MediaCodec CrappyEncoder generates a packet of raw
* AAC data.
*
* Note the packetLen must count in the ADTS header itself.
**/
private void addADTStoPacket(byte[] packet, int packetLen) {
int profile = 2; //AAC LC
//39=MediaCodecInfo.CodecProfileLevel.AACObjectELD;
int freqIdx = 4; //44.1KHz
int chanCfg = 2; //CPE
// fill in ADTS data
packet[0] = (byte)0xFF;
packet[1] = (byte)0xF9;
packet[2] = (byte)(((profile-1)<<6) + (freqIdx<<2) +(chanCfg>>2));
packet[3] = (byte)(((chanCfg&3)<<6) + (packetLen>>11));
packet[4] = (byte)((packetLen&0x7FF) >> 3);
packet[5] = (byte)(((packetLen&7)<<5) + 0x1F);
packet[6] = (byte)0xFC;
}
private void openFileStream() {
fos = null;
try {
fos = new FileOutputStream(outputFile, false);
} catch (FileNotFoundException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
/**
* Writes data into file
* @param data
*/
public void writeIntoOutputfile(byte[] data) {
try {
fos.write(data);
} catch (IOException ioe) {
Log.w(TAG, "failed writing debug data to file");
throw new RuntimeException(ioe);
}
}
public void stopEncoding() {
isEncoderRunning = false;
encoder.stop();
closeStream();
}
private void closeStream() {
try {
if (fos != null) {
fos.close();
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
}

Related

Discontinuous FTP download throws "Read timed out" or "Connection reset"

I used FTP and FTPClient from the package 'org.apache.commons.net.ftp' to download files from an FTP server.
Here is my complete example code:
public class FtpInput {
private static final Logger LOG = Logger.getLogger(FtpInput.class);
private static final int TIMEOUT = 120000;
private static final String SIZE_COMMAND_REPLY_CODE = "213 ";
/**
* FTPClient
*/
private FTPClient ftpClient;
/**
* FTP size
*/
private long completeFileSize = 0;
protected String ip = "";
protected int port = 21;
protected String user = "";
protected String passwd = "";
protected String path = "";
protected String fileName = "";
/**
* count input bytes
*/
private CountingInputStream is;
/**
* the bytes already processed
*/
private long processedBytesNum;
private byte[] inputBuffer = new byte[1024];
/**
* connect to ftp server and fetch inputStream
*/
public void connect() {
this.ftpClient = new FTPClient();
ftpClient.setRemoteVerificationEnabled(false);
try {
ftpClient.connect(ip, port);
if (!ftpClient.login(user, passwd)) {
throw new IOException("ftp login failed!");
}
if (StringUtils.isNotBlank(path)) {
if (!ftpClient.changeWorkingDirectory(path)) {
ftpClient.mkd(path);
if (!ftpClient.changeWorkingDirectory(path)) {
throw new IOException("ftp change working dir failed! path:" + path);
}
}
}
ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
ftpClient.setSoTimeout(TIMEOUT);
ftpClient.setConnectTimeout(TIMEOUT);
ftpClient.setDataTimeout(TIMEOUT);
ftpClient.enterLocalPassiveMode();
// keep control channel keep-alive when download large file
ftpClient.setControlKeepAliveTimeout(120);
} catch (Throwable e) {
e.printStackTrace();
throw new RuntimeException("ftp login failed!", e);
}
// get complete ftp size
completeFileSize = getFtpFileSize();
LOG.info(String.format("ftp file size: %d", completeFileSize));
try {
InputStream ftpis = this.ftpClient.retrieveFileStream(this.fileName);
if (ftpis == null) {
LOG.error("cannot fetch source file.");
}
this.is = new CountingInputStream(ftpis);
} catch (Throwable e) {
e.printStackTrace();
throw new RuntimeException(e.getMessage());
}
}
/**
* readBytes
*
* @return
*/
public byte[] readBytes() {
byte[] bytes = readBytesFromStream(is, inputBuffer);
// the bytes processed
processedBytesNum = is.getCount();
return bytes;
}
/**
* readBytesFromStream
*
* @param stream
* @param inputBuffer
* @return
*/
protected byte[] readBytesFromStream(InputStream stream, byte[] inputBuffer) {
Preconditions.checkNotNull(stream != null, "InputStream has not been inited yet.");
Preconditions.checkArgument(inputBuffer != null && inputBuffer.length > 0);
int readBytes;
try {
readBytes = stream.read(inputBuffer);
} catch (IOException e) {
throw new RuntimeException(e);
}
if (readBytes == inputBuffer.length) {
// inputBuffer is filled full.
return inputBuffer;
} else if (readBytes > 0 && readBytes < inputBuffer.length) {
// inputBuffer is not filled full.
byte[] tmpBytes = new byte[readBytes];
System.arraycopy(inputBuffer, 0, tmpBytes, 0, readBytes);
return tmpBytes;
} else if (readBytes == -1) {
// Read end.
return null;
} else {
// may other situation happens?
throw new RuntimeException(String.format("readBytesFromStream: readBytes=%s inputBuffer.length=%s",
readBytes, inputBuffer.length));
}
}
/**
* fetch the byte size of remote file size
*/
private long getFtpFileSize() {
try {
ftpClient.sendCommand("SIZE", this.fileName);
String reply = ftpClient.getReplyString().trim();
LOG.info(String.format("ftp file %s size reply : %s", fileName, reply));
Preconditions.checkArgument(reply.startsWith(SIZE_COMMAND_REPLY_CODE),
"ftp file size reply: %s is not success", reply);
String sizeSubStr = reply.substring(SIZE_COMMAND_REPLY_CODE.length());
long actualFtpSize = Long.parseLong(sizeSubStr);
return actualFtpSize;
} catch (Throwable e) {
e.printStackTrace();
throw new RuntimeException(e.getMessage());
}
}
public void close() {
try {
if (is != null) {
LOG.info(String.format("already read %d bytes from ftp file %s", is.getCount(), fileName));
is.close();
}
if (ftpClient != null) {
// Must call completePendingCommand() to finish command.
boolean isSuccessTransfer = ftpClient.completePendingCommand();
if (!isSuccessTransfer) {
LOG.error("error happened when complete transfer of ftp");
}
ftpClient.logout();
ftpClient.disconnect();
}
} catch (Throwable e) {
e.printStackTrace();
LOG.error(String.format("Close ftp input failed:%s,%s", e.getMessage(), e.getCause()));
} finally {
is = null;
ftpClient = null;
}
}
public void validInputComplete() {
Preconditions.checkArgument(processedBytesNum == completeFileSize, "ftp file transfer is not complete");
}
/**
* main
*
* #param args
*/
public static void main(String[] args) {
// TODO Auto-generated method stub
String ip = "***.***.***.****";
int port = 21;
String user = "***";
String passwd = "***";
String path = "/home/work";
String fileName = "b.txt";
FtpInput input = new FtpInput();
try {
input.fileName = fileName;
input.path = path;
input.ip = ip;
input.port = port;
input.user = user;
input.passwd = passwd;
// connect to FTP server
input.connect();
while (true) {
// read bytes
byte[] bytes = input.readBytes();
if (bytes == null) {
break;
}
LOG.info("read " + bytes.length + " bytes at :" + new Date(System.currentTimeMillis()));
// Attention: this is used for simulating the process of writing data into hive table
// it maybe consume more than 1 minute;
Thread.sleep(3000);
}
input.validInputComplete();
} catch (Exception e) {
e.printStackTrace();
} finally {
input.close();
}
}
}
Here is the exception message:
java.net.SocketTimeoutException: Read timed out
or
java.net.SocketException: Connection reset
thrown at stream.read in the method readBytesFromStream.
At first, I thought it was probably caused by writing into the Hive table too slowly, so that the FTP server closed the connection.
But actually, the speed of writing into the Hive table is fast enough.
Now I need your help: how can I fix this problem?
From your comments, it looks like it can take hours before you finish downloading the file.
You cannot reasonably expect an FTP server to wait hours for you to finish the transfer, particularly if you are not transferring anything most of the time. You waste server resources, and most servers will protect themselves against such abuse.
Your design is flawed.
You should redesign your application to first fully download the file, and import it into Hive only after the download finishes.
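A minimal sketch of that approach with Commons Net (reusing the ip/port/user/passwd/fileName fields from your class): download to a local temp file in one blocking call, then run the slow Hive import against the local copy.
// Sketch: download the whole remote file first, then process it locally.
FTPClient ftp = new FTPClient();
ftp.connect(ip, port);
if (!ftp.login(user, passwd)) {
    throw new IOException("ftp login failed!");
}
ftp.setFileType(FTP.BINARY_FILE_TYPE);
ftp.enterLocalPassiveMode();

File localCopy = File.createTempFile("ftp-", ".dat");
try (OutputStream out = new BufferedOutputStream(new FileOutputStream(localCopy))) {
    // retrieveFile blocks until the transfer is complete (or fails)
    if (!ftp.retrieveFile(fileName, out)) {
        throw new IOException("download failed: " + ftp.getReplyString());
    }
}
ftp.logout();
ftp.disconnect();

// Only now start the slow import into Hive, reading from localCopy instead of the FTP stream.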

Android basic TTS engine from audio file

I have an mp3 file, hello.mp3. I am wrapping the mp3 in a FileInputStream and converting the input stream to bytes, then pushing the bytes to SynthesisCallback.audioAvailable(bytes, offset, length), but this results in just noise. The file hello.mp3 plays just fine if I load it into the Android music player.
Why is this not working when I push bytes from the file to SynthesisCallback? I have pasted my code below.
This is where I generate the Audio stream from mp3 file:
class AudioStream {
InputStream stream;
int length;
}
private AudioStream getAudioStream(String text) throws IOException {
// TODO parse text, and generate audio file.
File hello = new File(Environment.getExternalStorageDirectory(), "hello.mp3");
AudioStream astream = new AudioStream();
astream.length = (int) hello.length();
astream.stream = new FileInputStream(hello);
return astream;
}
This is my InputStream to byte[] method.
public byte[] inputStreamToByteArray(AudioStream inStream) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] buffer = new byte[inStream.length];
int bytesRead;
while ((bytesRead = inStream.stream.read(buffer)) > 0) {
baos.write(buffer, 0, bytesRead);
}
return baos.toByteArray();
}
This is my onSynthesizeText method in my TextToSpeechService class.
@Override
protected synchronized void onSynthesizeText(SynthesisRequest request,
SynthesisCallback callback) {
// TODO load language and other checks.
// At this point, we have loaded the language
callback.start(16000,
AudioFormat.ENCODING_PCM_16BIT, 1 /* Number of channels. */);
final String text = request.getText().toLowerCase();
try {
Log.i(TAG, "Getting audio stream for text "+text);
AudioStream aStream = getAudioStream(text);
byte[] bytes = inputStreamToByteArray(aStream);
final int maxBufferSize = callback.getMaxBufferSize();
int offset = 0;
while (offset < aStream.length) {
int bytesToWrite = Math.min(maxBufferSize, aStream.length - offset);
callback.audioAvailable(bytes, offset, bytesToWrite);
offset += bytesToWrite;
}
} catch (Exception e) {
e.printStackTrace();
callback.error();
}
// Alright, we're done with our synthesis - yay!
callback.done();
}
This is how I am testing my synthesis engine in the making.
//initialize text speech
textToSpeech = new TextToSpeech(this, new OnInitListener() {
/**
* a callback to be invoked indicating the completion of the TextToSpeech
* engine initialization.
*/
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
int result = textToSpeech.setLanguage(Locale.US);
if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
Log.e("error", "Language is not supported");
} else {
convertToSpeech("Hello");
}
} else {
Log.e("error", "Failed to Initilize!");
}
}
/**
* Speaks the string using the specified queuing strategy and speech parameters.
*/
private void convertToSpeech(String text) {
if (null == text || "".equals(text)) {
return;
}
textToSpeech.speak(text, TextToSpeech.QUEUE_FLUSH, null);
}
});
The function audioAvailable(byte[] buffer, int offset, int length) expects PCM samples as input. You cannot read bytes from an .mp3 file and use them as input to the function. You need to use a .wav file, or first convert the .mp3 file to a .wav file, and feed its PCM data to audioAvailable.
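For example, with a plain 16-bit mono PCM .wav file (assuming the canonical 44-byte header and a 16000 Hz sample rate matching what you pass to callback.start()), the synthesis loop could look roughly like this:
// Sketch: stream raw PCM from a .wav file to the SynthesisCallback.
File wav = new File(Environment.getExternalStorageDirectory(), "hello.wav");
callback.start(16000, AudioFormat.ENCODING_PCM_16BIT, 1 /* channels */);
try {
    FileInputStream in = new FileInputStream(wav);
    in.skip(44); // assumption: canonical 44-byte RIFF/WAVE header, the rest is raw PCM
    byte[] chunk = new byte[callback.getMaxBufferSize()];
    int read;
    while ((read = in.read(chunk)) > 0) {
        callback.audioAvailable(chunk, 0, read); // each call must stay within getMaxBufferSize()
    }
    in.close();
    callback.done();
} catch (IOException e) {
    callback.error();
}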

Program for file transfer in java using sockets

I have written Java code to transfer files from one server to another using socket programming. I got the code from another Java forum, and it meets my requirements. The program is supposed to transfer large files (like .mkv or .mpg movies) from one machine to another and can be used to transfer files of all formats. But after running the code, I found that the program is not able to transfer large files such as movies, or even PDFs of 80 MB or 111 MB. The program uses a ByteBuffer, but the error still occurs. The code is as follows (I got it from this site: http://www.coderpanda.com/java-socket-programming-transferring-large-sized-files-through-socket/).
ClientMain.java
import java.io.IOException;
import java.net.Socket;
public class ClientMain
{
private DirectoryTxr transmitter = null;
Socket clientSocket = null;
private boolean connectedStatus = false;
private String ipAddress;
String srcPath = null;
String dstPath = "";
public ClientMain()
{
}
public void setIpAddress(String ip)
{
this.ipAddress = ip;
}
public void setSrcPath(String path)
{
this.srcPath = path;
}
public void setDstPath(String path)
{
this.dstPath = path;
}
private void createConnection()
{
Runnable connectRunnable = new Runnable()
{
public void run()
{
while (!connectedStatus)
{
try
{
clientSocket = new Socket(ipAddress, 22);
connectedStatus = true;
transmitter = new DirectoryTxr(clientSocket, srcPath, dstPath);
}
catch (IOException io)
{
io.printStackTrace();
}
}
}
};
Thread connectionThread = new Thread(connectRunnable);
connectionThread.start();
}
public static void main(String[] args)
{
ClientMain main = new ClientMain();
main.setIpAddress("localHost");
main.setSrcPath("C:/Transfer/");
main.setDstPath("C:/Receive");
main.createConnection();
}
}
DirectoryTxr.java
import java.io.*;
import java.net.Socket;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
public class DirectoryTxr
{
Socket clientSocket = null;
String srcDir = null;
String dstDir = null;
byte[] readBuffer = new byte[1024];
private InputStream inStream = null;
private OutputStream outStream = null;
int state = 0;
final int permissionReqState = 1;
final int initialState = 0;
final int dirHeaderSendState = 2;
final int fileHeaderSendState = 3;
final int fileSendState = 4;
final int fileFinishedState = 5;
private boolean isLive = false;
private int numFiles = 0;
private int filePointer = 0;
String request = "May I send?";
String respServer = "Yes,You can";
String dirResponse = "Directory created...Please send files";
String fileHeaderRecvd = "File header received ...Send File";
String fileReceived = "File Received";
String dirFailedResponse = "Failed";
File[] opFileList = null;
public DirectoryTxr(Socket clientSocket, String srcDir, String dstDir)
{
try
{
this.clientSocket = clientSocket;
inStream = clientSocket.getInputStream();
outStream = clientSocket.getOutputStream();
isLive = true;
this.srcDir = srcDir;
this.dstDir = dstDir;
state = initialState;
readResponse();
sendMessage(request);
state = permissionReqState;
}
catch (IOException io)
{
io.printStackTrace();
}
}
private void sendMessage(String message)
{
try
{
sendBytes(request.getBytes("UTF-8"));
}
catch (UnsupportedEncodingException e)
{
e.printStackTrace();
}
}
private void readResponse()
{
Runnable readRunnable = new Runnable()
{
public void run()
{
while (isLive)
{
try
{
int num = inStream.read(readBuffer);
if (num > 0)
{
byte[] tempArray = new byte[num];
System.arraycopy(readBuffer, 0, tempArray, 0, num);
processBytes(tempArray);
}
}
catch (SocketException se)
{
System.exit(0);
}
catch (IOException io)
{
io.printStackTrace();
isLive = false;
}
}
}
};
Thread readThread = new Thread(readRunnable);
readThread.start();
}
private void sendDirectoryHeader()
{
File file = new File(srcDir);
if (file.isDirectory())
{
try
{
String[] childFiles = file.list();
numFiles = childFiles.length;
String dirHeader = "$" + dstDir + "#" + numFiles + "&";
sendBytes(dirHeader.getBytes("UTF-8"));
}
catch (UnsupportedEncodingException en)
{
en.printStackTrace();
}
}
else
{
System.out.println(srcDir + " is not a valid directory");
}
}
private void sendFile(String dirName)
{
File file = new File(dirName);
if (!file.isDirectory())
{
try
{
int len = (int) file.length();
int buffSize = len / 8;
RandomAccessFile raf = new RandomAccessFile(file, "rw");
FileChannel channel = raf.getChannel();
int numRead = 0;
while (numRead >= 0)
{
ByteBuffer buf = ByteBuffer.allocate(1024 * 100000);
numRead = channel.read(buf);
if (numRead > 0)
{
byte[] array = new byte[numRead];
System.arraycopy(buf.array(), 0, array, 0, numRead);
sendBytes(array);
}
}
System.out.println("Finished");
}
catch (IOException io)
{
io.printStackTrace();
}
}
}
private void sendHeader(String fileName)
{
try
{
File file = new File(fileName);
if (file.isDirectory())
return;
String header = "&" + fileName + "#" + file.length() + "*";
sendHeader(header);
sendBytes(header.getBytes("UTF-8"));
}
catch (UnsupportedEncodingException e)
{
e.printStackTrace();
}
}
private void sendBytes(byte[] dataBytes)
{
synchronized (clientSocket)
{
if (outStream != null)
{
try
{
outStream.write(dataBytes);
outStream.flush();
}
catch (IOException io)
{
io.printStackTrace();
}
}
}
}
private void processBytes(byte[] data)
{
try
{
String parsedMessage = new String(data, "UTF-8");
System.out.println(parsedMessage);
setResponse(parsedMessage);
}
catch (UnsupportedEncodingException u)
{
u.printStackTrace();
}
}
private void setResponse(String message)
{
if (message.trim().equalsIgnoreCase(respServer) && state == permissionReqState)
{
state = dirHeaderSendState;
sendDirectoryHeader();
}
else if (message.trim().equalsIgnoreCase(dirResponse) && state == dirHeaderSendState)
{
state = fileHeaderSendState;
if (LocateDirectory())
{
createAndSendHeader();
}
else
{
System.out.println("Vacant or invalid directory");
}
}
else if (message.trim().equalsIgnoreCase(fileHeaderRecvd) && state == fileHeaderSendState)
{
state = fileSendState;
sendFile(opFileList[filePointer].toString());
state = fileFinishedState;
filePointer++;
}
else if (message.trim().equalsIgnoreCase(fileReceived) && state == fileFinishedState)
{
if (filePointer < numFiles)
{
createAndSendHeader();
}
System.out.println("Successfully sent");
}
else if (message.trim().equalsIgnoreCase(dirFailedResponse))
{
System.out.println("Going to exit....Error ");
}
else if (message.trim().equalsIgnoreCase("Thanks"))
{
System.out.println("All files were copied");
}
}
private void closeSocket()
{
try
{
clientSocket.close();
}
catch (IOException e)
{
e.printStackTrace();
}
}
private boolean LocateDirectory()
{
boolean status = false;
File file = new File(srcDir);
if (file.isDirectory())
{
opFileList = file.listFiles();
numFiles = opFileList.length;
if (numFiles <= 0)
{
System.out.println("No files found");
}
else
{
status = true;
}
}
return status;
}
private void createAndSendHeader()
{
File opFile = opFileList[filePointer];
String header = "&" + opFile.getName() + "#" + opFile.length() + "*";
try
{
state = fileHeaderSendState;
sendBytes(header.getBytes("UTF-8"));
}
catch (UnsupportedEncodingException e)
{
}
}
private void sendListFiles()
{
createAndSendHeader();
}
}
ServerMain.java
public class ServerMain {
public ServerMain() {
}
public static void main(String[] args) {
DirectoryRcr dirRcr = new DirectoryRcr();
}
}
DirectoryRcr.java
import java.io.*;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
public class DirectoryRcr
{
String request = "May I send?";
String respServer = "Yes,You can";
String dirResponse = "Directory created...Please send files";
String dirFailedResponse = "Failed";
String fileHeaderRecvd = "File header received ...Send File";
String fileReceived = "File Received";
Socket socket = null;
OutputStream ioStream = null;
InputStream inStream = null;
boolean isLive = false;
int state = 0;
final int initialState = 0;
final int dirHeaderWait = 1;
final int dirWaitState = 2;
final int fileHeaderWaitState = 3;
final int fileContentWaitState = 4;
final int fileReceiveState = 5;
final int fileReceivedState = 6;
final int finalState = 7;
byte[] readBuffer = new byte[1024 * 100000];
long fileSize = 0;
String dir = "";
FileOutputStream foStream = null;
int fileCount = 0;
File dstFile = null;
public DirectoryRcr()
{
acceptConnection();
}
private void acceptConnection()
{
try
{
ServerSocket server = new ServerSocket(22);
socket = server.accept();
isLive = true;
ioStream = socket.getOutputStream();
inStream = socket.getInputStream();
state = initialState;
startReadThread();
}
catch (IOException io)
{
io.printStackTrace();
}
}
private void startReadThread()
{
Thread readRunnable = new Thread()
{
public void run()
{
while (isLive)
{
try
{
int num = inStream.read(readBuffer);
if (num > 0)
{
byte[] tempArray = new byte[num];
System.arraycopy(readBuffer, 0, tempArray, 0, num);
processBytes(tempArray);
}
sleep(100);
} catch (SocketException s)
{
}
catch (IOException e)
{
e.printStackTrace();
}
catch (InterruptedException i)
{
i.printStackTrace();
}
}
}
};
Thread readThread = new Thread(readRunnable);
readThread.start();
}
private void processBytes(byte[] buff) throws InterruptedException
{
if (state == fileReceiveState || state == fileContentWaitState)
{
if (state == fileContentWaitState)
state = fileReceiveState;
fileSize = fileSize - buff.length;
writeToFile(buff);
if (fileSize == 0)
{
state = fileReceivedState;
try
{
foStream.close();
}
catch (IOException io)
{
io.printStackTrace();
}
System.out.println("Received " + dstFile.getName());
sendResponse(fileReceived);
fileCount--;
if (fileCount != 0)
{
state = fileHeaderWaitState;
}
else
{
System.out.println("Finished");
state = finalState;
sendResponse("Thanks");
Thread.sleep(2000);
System.exit(0);
}
System.out.println("Received");
}
}
else
{
parseToUTF(buff);
}
}
private void parseToUTF(byte[] data)
{
try
{
String parsedMessage = new String(data, "UTF-8");
System.out.println(parsedMessage);
setResponse(parsedMessage);
}
catch (UnsupportedEncodingException u) {
u.printStackTrace();
}
}
private void setResponse(String message)
{
if (message.trim().equalsIgnoreCase(request) && state == initialState)
{
sendResponse(respServer);
state = dirHeaderWait;
}
else if (state == dirHeaderWait)
{
if (createDirectory(message))
{
sendResponse(dirResponse);
state = fileHeaderWaitState;
}
else
{
sendResponse(dirFailedResponse);
System.out.println("Error occurred...Going to exit");
System.exit(0);
}
} else if (state == fileHeaderWaitState)
{
createFile(message);
state = fileContentWaitState;
sendResponse(fileHeaderRecvd);
}
else if (message.trim().equalsIgnoreCase(dirFailedResponse))
{
System.out.println("Error occurred ....");
System.exit(0);
}
}
private void sendResponse(String resp)
{
try
{
sendBytes(resp.getBytes("UTF-8"));
}
catch (UnsupportedEncodingException e)
{
e.printStackTrace();
}
}
private boolean createDirectory(String dirName)
{
boolean status = false;
dir = dirName.substring(dirName.indexOf("$") + 1, dirName.indexOf("#"));
fileCount = Integer.parseInt(dirName.substring(dirName.indexOf("#") + 1, dirName.indexOf("&")));
if (new File(dir).mkdir())
{
status = true;
System.out.println("Successfully created directory " + dirName);
}
else if (new File(dir).mkdirs())
{
status = true;
System.out.println("Directories were created " + dirName);
}
else if (new File(dir).exists())
{
status = true;
System.out.println("Directory exists" + dirName);
}
else
{
System.out.println("Could not create directory " + dirName);
status = false;
}
return status;
}
private void createFile(String fileName)
{
String file = fileName.substring(fileName.indexOf("&") + 1, fileName.indexOf("#"));
String lengthFile = fileName.substring(fileName.indexOf("#") + 1, fileName.indexOf("*"));
fileSize = Integer.parseInt(lengthFile);
dstFile = new File(dir + "/" + file);
try
{
foStream = new FileOutputStream(dstFile);
System.out.println("Starting to receive " + dstFile.getName());
}
catch (FileNotFoundException fn)
{
fn.printStackTrace();
}
}
private void writeToFile(byte[] buff)
{
try
{
foStream.write(buff);
}
catch (IOException io)
{
io.printStackTrace();
}
}
private void sendBytes(byte[] dataBytes)
{
synchronized (socket)
{
if (ioStream != null)
{
try
{
ioStream.write(dataBytes);
}
catch (IOException io)
{
io.printStackTrace();
}
}
}
}
}
Note that:
ClientMain.java and DirectoryTxr.java are the two classes of the client application.
ServerMain.java and DirectoryRcr.java are the two classes of the server application.
Run ClientMain.java and ServerMain.java simultaneously.
Also specify in ClientMain.java the source directory, the destination directory, and the host address of the machine the server is running on (adjust them for your setup). We are not specifying a single source file here; instead, a source directory or folder is specified, so all files in the source directory will be transferred.
I would really appreciate it if someone could help me with this problem.
You can check this code. It can send files of up to 2 GB; it is checked and working. Basically, I first send the file size and file name in an object, and then I start sending the file itself. First of all, let me show you the class whose object carries the file's details and is sent to the receiver.
FileDetails class:
public class FileDetails implements Serializable {
String name;
long size;
public void setDetails(String name, long size) {
this.name = name;
this.size = size;
}
public String getName() {
return name;
}
public long getSize() {
return size;
}
}
Sender:
ServerSocket sendServer = null;
Socket sendSocket;
FileDetails details;
byte data[];
try {
File file = new File("File Url");
// Getting file name and size
if (file.length() > Integer.MAX_VALUE) {
System.out.println("File size exceeds 2 GB");
} else {
sendServer = new ServerSocket(5050);
System.out.println("Waiting for Client...");
sendSocket = sendServer.accept();
// File Object for accesing file Details
System.out.println("Connected to Client...");
data = new byte[2048]; // Here you can increase the size also which will send it faster
details = new FileDetails();
details.setDetails(file.getName(), file.length());
// Sending file details to the client
System.out.println("Sending file details...");
ObjectOutputStream sendDetails = new ObjectOutputStream(sendSocket.getOutputStream());
sendDetails.writeObject(details);
sendDetails.flush();
// Sending File Data
System.out.println("Sending file data...");
FileInputStream fileStream = new FileInputStream(file);
BufferedInputStream fileBuffer = new BufferedInputStream(fileStream);
OutputStream out = sendSocket.getOutputStream();
int count;
while ((count = fileBuffer.read(data)) > 0) {
System.out.println("Data Sent : " + count);
out.write(data, 0, count);
out.flush();
}
out.close();
fileBuffer.close();
fileStream.close();
}
} catch (Exception e) {
System.out.println("Error : " + e.toString());
}
Receiver's code :
int port = 5050;
try {
System.out.println("Connecting to Server...");
Socket receiveSocket = new Socket("IP of Server", port);
System.out.println("Connected to Server...");
// Getting file details
System.out.println("Getting details from Server...");
ObjectInputStream getDetails = new ObjectInputStream(receiveSocket.getInputStream());
FileDetails details = (FileDetails) getDetails.readObject();
System.out.println("Now receiving file...");
// Storing file name and sizes
String fileName = details.getName();
System.out.println("File Name : " + fileName);
byte data[] = new byte[2048]; // Here you can increase the size also which will receive it faster
FileOutputStream fileOut = new FileOutputStream("D:\\" + fileName);
InputStream fileIn = receiveSocket.getInputStream();
BufferedOutputStream fileBuffer = new BufferedOutputStream(fileOut);
int count;
int sum = 0;
while ((count = fileIn.read(data)) > 0) {
sum += count;
fileBuffer.write(data, 0, count);
System.out.println("Data received : " + sum);
fileBuffer.flush();
}
System.out.println("File Received...");
fileBuffer.close();
fileIn.close();
} catch (Exception e) {
System.out.println("Error : " + e.toString());
}
Hope this helps you.
You don't need all this. The canonical way to copy a stream in Java is as follows. It works for any buffer size greater than zero. I generally use 8192. There is certainly no necessity to read entire files into memory. It just wastes time and space.
while ((count = in.read(buffer)) > 0)
{
out.write(buffer, 0, count);
}
If you know the size in advance and need to keep the socket open for another transfer:
while (total < length && (count = in.read(buffer, 0, length-total > buffer.length ? buffer.length : (int)(length-total))) > 0)
{
out.write(buffer, 0, count);
total += count;
}
where total is a long initialized to zero before this loop, and length is the length you know in advance.
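Wrapped up as a method (assuming, as in your protocol, that the length is sent ahead of the data):
// Copy exactly `length` bytes from `in` to `out`, leaving the stream open for further use.
static void copyFully(InputStream in, OutputStream out, long length) throws IOException {
    byte[] buffer = new byte[8192];
    long total = 0;
    int count;
    while (total < length
            && (count = in.read(buffer, 0,
                    length - total > buffer.length ? buffer.length : (int) (length - total))) > 0) {
        out.write(buffer, 0, count);
        total += count;
    }
    if (total < length) {
        throw new EOFException("Stream ended after " + total + " of " + length + " bytes");
    }
}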

How to convert JPG image to DICOM file using dcm4che-3.2.1?

I can set the attributes and create the DICOM file, but I cannot write the image into the DICOM file.
I've tried this with an image I have and it works, but I expect it won't work for RGB images. Something like this, though:
BufferedImage jpg = ImageIO.read(new File("myjpg.jpg"));
//Convert the image to a byte array
DataBufferUShort buff = (DataBufferUShort) jpg.getData().getDataBuffer();
short[] data = buff.getData();
ByteBuffer byteBuf = ByteBuffer.allocate(2*data.length);
int i = 0;
while (data.length > i) {
byteBuf.putShort(data[i]);
i++;
}
//Copy a header
DicomInputStream dis = new DicomInputStream(new File("fileToCopyheaderFrom.dcm"));
Attributes meta = dis.readFileMetaInformation();
Attributes attribs = dis.readDataset(-1, Tag.PixelData);
dis.close();
//Change the rows and columns
attribs.setInt(Tag.Rows, VR.US, jpg.getHeight());
attribs.setInt(Tag.Columns, VR.US, jpg.getWidth());
System.out.println(byteBuf.array().length);
//Attributes attribs = new Attributes();
//Write the file
attribs.setBytes(Tag.PixelData, VR.OW, byteBuf.array());
DicomOutputStream dcmo = new DicomOutputStream(new File("myDicom.dcm"));
dcmo.writeFileMetaInformation(meta);
attribs.writeTo(dcmo);
dcmo.close();
Edit 1
I've assumed your image has an Unsigned short Data Buffer here.
DataBufferUShort buff = (DataBufferUShort) jpg.getData().getDataBuffer();
To handle other data buffers you should check the type, cast accordingly, and then convert to a byte array. For a byte data buffer it should be easy:
DataBufferByte buff = (DataBufferByte) jpg.getData().getDataBuffer();
Then buff.getData(numOfBank), where numOfBank is 0 for your image, should return a byte array.
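A rough sketch of that dispatch (my own illustration; the endianness of the resulting byte[] has to match what you write as PixelData):
// Sketch: turn the image's pixel data into a byte[] regardless of the DataBuffer subtype.
BufferedImage jpg = ImageIO.read(new File("myjpg.jpg"));
DataBuffer db = jpg.getData().getDataBuffer();
byte[] pixelBytes;
if (db instanceof DataBufferByte) {
    // 8-bit samples: bank 0 is already a byte array
    pixelBytes = ((DataBufferByte) db).getData(0);
} else if (db instanceof DataBufferUShort) {
    // 16-bit samples: widen every short to two bytes
    short[] shorts = ((DataBufferUShort) db).getData(0);
    ByteBuffer bb = ByteBuffer.allocate(2 * shorts.length); // big-endian by default; adjust to your transfer syntax
    for (short s : shorts) {
        bb.putShort(s);
    }
    pixelBytes = bb.array();
} else {
    throw new IllegalArgumentException("Unhandled DataBuffer type: " + db.getClass().getName());
}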
This code converts a JPG file to a DICOM file:
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import org.dcm4che2.data.DicomObject;
import org.dcm4che2.data.Tag;
import org.dcm4che2.data.VR;
import org.dcm4che2.io.DicomInputStream;
import org.dcm4che2.io.DicomOutputStream;
public class JPG_TO_DCM {
public static void main(String a[]) throws FileNotFoundException, IOException
{
// System.out.println("JPG2DCM_Convert.main()");
String patientID="123"; // Enter Patient ID
String patientName="DCM IMAGE"; // Enter Pantient Name
String studyID="123"; // Enter Study ID
Jpg2Dcm jpg2Dcm = new Jpg2Dcm(); //
File jpgFileOS = new File("D:\\AA.jpg");
File dcmFileOS = new File("D:\\AA.dcm");
try {
jpg2Dcm.convert(jpgFileOS, dcmFileOS); // Only convert JPG to DICOM without setting any DICOM tags (without a DICOM header)
} catch (IOException e) {
e.printStackTrace();
}
try {
anonymizeDicom(new File("D:\\AA.dcm"),new File("D:\\AA_FIANAL.dcm"),patientID,studyID,patientName); // Set the DICOM Header
} catch (Exception e) {
e.printStackTrace();
}
}
public static final int[] TAGS = {
Tag.PatientID,
Tag.StudyID,
Tag.PatientName
};
public static void anonymizeDicom(File fileInput, File fileOutput, String patientIDForUpdate, String studyIDForUpdate,String patientNameForUpdate) throws FileNotFoundException, IOException
{
int patientID = 1048608;
int studyID = 2097168;
int patientName=1048592;
try
{
FileInputStream fis = new FileInputStream(fileInput);
DicomInputStream dis = new DicomInputStream(fis);
DicomObject obj = dis.readDicomObject();
for (int tag : TAGS)
{
if (tag == patientID) {
replaceTag(obj, tag, patientIDForUpdate);
}
if (tag == studyID) {
replaceTag(obj, tag, studyIDForUpdate);
}
if (tag == patientName) {
replaceTag(obj, tag, patientNameForUpdate);
}
}
fis.close();
dis.close();
writeDicomFile(obj, fileOutput);
} catch (Exception e) {
e.printStackTrace();
}
}
@SuppressWarnings("unused")
private static String[] getValue(DicomObject object, int[] PATIENT_ADDITIONAL_TAGS)
{
String [] value = new String [PATIENT_ADDITIONAL_TAGS.length];
int i =0;
while (i<PATIENT_ADDITIONAL_TAGS.length)
{
for (int tag : PATIENT_ADDITIONAL_TAGS)
{
value[i]=object.getString(tag);
i++;
}
}
return value;
}
public static void replaceTag(DicomObject dObj, int tag, String newValue) {
if (tag != 0 && dObj.contains(tag)) {
VR vr = dObj.vrOf(tag);
try
{
dObj.putString(tag, vr, newValue);
} catch (Exception e) {
// System.err.println("Error replacing Tag: " + tag+ " with new value: " + newValue);
}
}
}
public static void writeDicomFile(DicomObject dObj, File f) {
FileOutputStream fos;
try
{
fos = new FileOutputStream(f);
}
catch (FileNotFoundException e)
{
e.printStackTrace();
return;
}
BufferedOutputStream bos = new BufferedOutputStream(fos);
DicomOutputStream dos = new DicomOutputStream(bos);
try {
dos.writeDicomFile(dObj);
} catch (IOException e) {
e.printStackTrace();
return;
} finally {
try {
dos.close();
} catch (IOException ignore) {
}
}
}
static int count1 = 0;
public static void getFile(String dirPath, String destdirPath,String patientID, String studyID,String patientNameForUpdate) throws FileNotFoundException,IOException
{
File f = new File(dirPath);
File[] files = f.listFiles();
if (files != null)
for (int i = 0; i < files.length; i++) {
File file = files[i];
if (file.isDirectory()) {
getFile(file.getAbsolutePath(), destdirPath, patientID,studyID,patientNameForUpdate);
} else {
count1++;
String name = file.getAbsolutePath();
// test7 test = new test7();
anonymizeDicom(new File(name), new File(destdirPath+ "/" + file.getName()), patientID, studyID,patientNameForUpdate);
}
}
}
}
Jpg2Dcm.java File
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Date;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.dcm4che2.data.BasicDicomObject;
import org.dcm4che2.data.DicomObject;
import org.dcm4che2.data.Tag;
import org.dcm4che2.data.UID;
import org.dcm4che2.data.VR;
import org.dcm4che2.io.DicomOutputStream;
import org.dcm4che2.util.UIDUtils;
public class Jpg2Dcm {
private static final String USAGE =
"jpg2dcm [Options] <jpgfile> <dcmfile>";
private static final String DESCRIPTION =
"Encapsulate JPEG Image into DICOM Object.\nOptions:";
private static final String EXAMPLE =
"--\nExample 1: Encapulate JPEG Image verbatim with default values " +
"for mandatory DICOM attributes into DICOM Secondary Capture Image:" +
"\n$ jpg2dcm image.jpg image.dcm" +
"\n--\nExample 2: Encapulate JPEG Image without application segments " +
"and additional DICOM attributes to mandatory defaults into DICOM " +
"Image Object:" +
"\n$ jpg2dcm --no-appn -c patattrs.cfg homer.jpg image.dcm" +
"\n--\nExample 3: Encapulate MPEG2 Video with specified DICOM " +
"attributes into DICOM Video Object:" +
"\n$ jpg2dcm --mpeg -C mpg2dcm.cfg video.mpg video.dcm";
private static final String LONG_OPT_CHARSET = "charset";
private static final String OPT_CHARSET_DESC =
"Specific Character Set code string, ISO_IR 100 by default";
private static final String OPT_AUGMENT_CONFIG_DESC =
"Specifies DICOM attributes included additional to mandatory defaults";
private static final String OPT_REPLACE_CONFIG_DESC =
"Specifies DICOM attributes included instead of mandatory defaults";
private static final String LONG_OPT_TRANSFER_SYNTAX = "transfer-syntax";
private static final String OPT_TRANSFER_SYNTAX_DESC =
"Transfer Syntax; 1.2.840.10008.1.2.4.50 (JPEG Baseline) by default.";
private static final String LONG_OPT_MPEG = "mpeg";
private static final String OPT_MPEG_DESC =
"Same as --transfer-syntax 1.2.840.10008.1.2.4.100 (MPEG2).";
private static final String LONG_OPT_UID_PREFIX = "uid-prefix";
private static final String OPT_UID_PREFIX_DESC =
"Generate UIDs with given prefix, 1.2.40.0.13.1.<host-ip> by default.";
private static final String LONG_OPT_NO_APPN = "no-appn";
private static final String OPT_NO_APPN_DESC =
"Exclude application segments APPn from JPEG stream; " +
"encapsulate JPEG stream verbatim by default.";
private static final String OPT_HELP_DESC =
"Print this message";
private static final String OPT_VERSION_DESC =
"Print the version information and exit";
private static int FF = 0xff;
private static int SOF = 0xc0;
private static int DHT = 0xc4;
private static int DAC = 0xcc;
private static int SOI = 0xd8;
private static int SOS = 0xda;
private static int APP = 0xe0;
private String charset = "ISO_IR 100";
private String transferSyntax = UID.JPEGBaseline1;
private byte[] buffer = new byte[8192];
private int jpgHeaderLen;
private int jpgLen;
private boolean noAPPn = false;
private Properties cfg = new Properties();
public Jpg2Dcm() {
try {
cfg.load(Jpg2Dcm.class.getResourceAsStream("jpg2dcm.cfg"));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public final void setCharset(String charset) {
this.charset = charset;
}
private final void setTransferSyntax(String uid) {
this.transferSyntax = uid;
}
private final void setNoAPPn(boolean noAPPn) {
this.noAPPn = noAPPn;
}
private void loadConfiguration(File cfgFile, boolean augment)
throws IOException {
Properties tmp = augment ? new Properties(cfg) : new Properties();
InputStream in = new BufferedInputStream(new FileInputStream(cfgFile));
try {
tmp.load(in);
} finally {
in.close();
}
cfg = tmp;
}
public void convert(File jpgFile, File dcmFile) throws IOException {
jpgHeaderLen = 0;
jpgLen = (int) jpgFile.length();
DataInputStream jpgInput = new DataInputStream(
new BufferedInputStream(new FileInputStream(jpgFile)));
try {
DicomObject attrs = new BasicDicomObject();
attrs.putString(Tag.SpecificCharacterSet, VR.CS, charset);
for (Enumeration en = cfg.propertyNames(); en.hasMoreElements();) {
String key = (String) en.nextElement();
int[] tagPath = Tag.toTagPath(key);
int last = tagPath.length-1;
VR vr = attrs.vrOf(tagPath[last]);
if (vr == VR.SQ) {
attrs.putSequence(tagPath);
} else {
attrs.putString(tagPath, vr, cfg.getProperty(key));
}
}
if (noAPPn || missingRowsColumnsSamplesPMI(attrs)) {
readHeader(attrs, jpgInput);
}
ensureUS(attrs, Tag.BitsAllocated, 8);
ensureUS(attrs, Tag.BitsStored, attrs.getInt(Tag.BitsAllocated));
ensureUS(attrs, Tag.HighBit, attrs.getInt(Tag.BitsStored) - 1);
ensureUS(attrs, Tag.PixelRepresentation, 0);
ensureUID(attrs, Tag.StudyInstanceUID);
ensureUID(attrs, Tag.SeriesInstanceUID);
ensureUID(attrs, Tag.SOPInstanceUID);
Date now = new Date();
attrs.putDate(Tag.InstanceCreationDate, VR.DA, now);
attrs.putDate(Tag.InstanceCreationTime, VR.TM, now);
attrs.initFileMetaInformation(transferSyntax);
FileOutputStream fos = new FileOutputStream(dcmFile);
BufferedOutputStream bos = new BufferedOutputStream(fos);
DicomOutputStream dos = new DicomOutputStream(bos);
try {
dos.writeDicomFile(attrs);
dos.writeHeader(Tag.PixelData, VR.OB, -1);
dos.writeHeader(Tag.Item, null, 0);
dos.writeHeader(Tag.Item, null, (jpgLen+1)&~1);
dos.write(buffer, 0, jpgHeaderLen);
int r;
while ((r = jpgInput.read(buffer)) > 0) {
dos.write(buffer, 0, r);
}
if ((jpgLen&1) != 0) {
dos.write(0);
}
dos.writeHeader(Tag.SequenceDelimitationItem, null, 0);
} finally {
dos.close();
}
} finally {
jpgInput.close();
}
}
private boolean missingRowsColumnsSamplesPMI(DicomObject attrs) {
return !(attrs.containsValue(Tag.Rows)
&& attrs.containsValue(Tag.Columns)
&& attrs.containsValue(Tag.SamplesPerPixel)
&& attrs.containsValue(Tag.PhotometricInterpretation)
);
}
private void readHeader(DicomObject attrs, DataInputStream jpgInput)
throws IOException {
if (jpgInput.read() != FF || jpgInput.read() != SOI
|| jpgInput.read() != FF) {
throw new IOException(
"JPEG stream does not start with FF D8 FF");
}
int marker = jpgInput.read();
int segmLen;
boolean seenSOF = false;
buffer[0] = (byte) FF;
buffer[1] = (byte) SOI;
buffer[2] = (byte) FF;
buffer[3] = (byte) marker;
jpgHeaderLen = 4;
while (marker != SOS) {
segmLen = jpgInput.readUnsignedShort();
if (buffer.length < jpgHeaderLen + segmLen + 2) {
growBuffer(jpgHeaderLen + segmLen + 2);
}
buffer[jpgHeaderLen++] = (byte) (segmLen >>> 8);
buffer[jpgHeaderLen++] = (byte) segmLen;
jpgInput.readFully(buffer, jpgHeaderLen, segmLen - 2);
if ((marker & 0xf0) == SOF && marker != DHT && marker != DAC) {
seenSOF = true;
int p = buffer[jpgHeaderLen] & 0xff;
int y = ((buffer[jpgHeaderLen+1] & 0xff) << 8)
| (buffer[jpgHeaderLen+2] & 0xff);
int x = ((buffer[jpgHeaderLen+3] & 0xff) << 8)
| (buffer[jpgHeaderLen+4] & 0xff);
int nf = buffer[jpgHeaderLen+5] & 0xff;
attrs.putInt(Tag.SamplesPerPixel, VR.US, nf);
if (nf == 3) {
attrs.putString(Tag.PhotometricInterpretation, VR.CS,
"YBR_FULL_422");
attrs.putInt(Tag.PlanarConfiguration, VR.US, 0);
} else {
attrs.putString(Tag.PhotometricInterpretation, VR.CS,
"MONOCHROME2");
}
attrs.putInt(Tag.Rows, VR.US, y);
attrs.putInt(Tag.Columns, VR.US, x);
attrs.putInt(Tag.BitsAllocated, VR.US, p > 8 ? 16 : 8);
attrs.putInt(Tag.BitsStored, VR.US, p);
attrs.putInt(Tag.HighBit, VR.US, p-1);
attrs.putInt(Tag.PixelRepresentation, VR.US, 0);
}
if (noAPPn & (marker & 0xf0) == APP) {
jpgLen -= segmLen + 2;
jpgHeaderLen -= 4;
} else {
jpgHeaderLen += segmLen - 2;
}
if (jpgInput.read() != FF) {
throw new IOException("Missing SOS segment in JPEG stream");
}
marker = jpgInput.read();
buffer[jpgHeaderLen++] = (byte) FF;
buffer[jpgHeaderLen++] = (byte) marker;
}
if (!seenSOF) {
throw new IOException("Missing SOF segment in JPEG stream");
}
}
private void growBuffer(int minSize) {
int newSize = buffer.length << 1;
while (newSize < minSize) {
newSize <<= 1;
}
byte[] tmp = new byte[newSize];
System.arraycopy(buffer, 0, tmp, 0, jpgHeaderLen);
buffer = tmp;
}
private void ensureUID(DicomObject attrs, int tag) {
if (!attrs.containsValue(tag)) {
attrs.putString(tag, VR.UI, UIDUtils.createUID());
}
}
private void ensureUS(DicomObject attrs, int tag, int val) {
if (!attrs.containsValue(tag)) {
attrs.putInt(tag, VR.US, val);
}
}
public static void main(String[] args) {
try {
CommandLine cl = parse(args);
Jpg2Dcm jpg2Dcm = new Jpg2Dcm();
if (cl.hasOption(LONG_OPT_CHARSET)) {
jpg2Dcm.setCharset(cl.getOptionValue(LONG_OPT_CHARSET));
}
if (cl.hasOption("c")) {
jpg2Dcm.loadConfiguration(new File(cl.getOptionValue("c")), true);
}
if (cl.hasOption("C")) {
jpg2Dcm.loadConfiguration(new File(cl.getOptionValue("C")), false);
}
if (cl.hasOption(LONG_OPT_UID_PREFIX)) {
UIDUtils.setRoot(cl.getOptionValue(LONG_OPT_UID_PREFIX));
}
if (cl.hasOption(LONG_OPT_MPEG)) {
jpg2Dcm.setTransferSyntax(UID.MPEG2);
}
if (cl.hasOption(LONG_OPT_TRANSFER_SYNTAX)) {
jpg2Dcm.setTransferSyntax(
cl.getOptionValue(LONG_OPT_TRANSFER_SYNTAX));
}
jpg2Dcm.setNoAPPn(cl.hasOption(LONG_OPT_NO_APPN));
List argList = cl.getArgList();
File jpgFile = new File("d:\\VIPUL.jpg");
File dcmFile = new File("d:\\aa.dcm");
long start = System.currentTimeMillis();
jpg2Dcm.convert(jpgFile, dcmFile);
long fin = System.currentTimeMillis();
System.out.println("Encapsulated " + jpgFile + " to "
+ dcmFile + " in " + (fin - start) + "ms.");
} catch (IOException e) {
e.printStackTrace();
}
}
private static CommandLine parse(String[] args) {
Options opts = new Options();
OptionBuilder.withArgName("code");
OptionBuilder.hasArg();
OptionBuilder.withDescription(OPT_CHARSET_DESC);
OptionBuilder.withLongOpt(LONG_OPT_CHARSET);
opts.addOption(OptionBuilder.create());
OptionBuilder.withArgName("file");
OptionBuilder.hasArg();
OptionBuilder.withDescription(OPT_AUGMENT_CONFIG_DESC);
opts.addOption(OptionBuilder.create("c"));
OptionBuilder.withArgName("file");
OptionBuilder.hasArg();
OptionBuilder.withDescription(OPT_REPLACE_CONFIG_DESC);
opts.addOption(OptionBuilder.create("C"));
OptionBuilder.withArgName("prefix");
OptionBuilder.hasArg();
OptionBuilder.withDescription(OPT_UID_PREFIX_DESC);
OptionBuilder.withLongOpt(LONG_OPT_UID_PREFIX);
opts.addOption(OptionBuilder.create());
OptionBuilder.withArgName("uid");
OptionBuilder.hasArg();
OptionBuilder.withDescription(OPT_TRANSFER_SYNTAX_DESC);
OptionBuilder.withLongOpt(LONG_OPT_TRANSFER_SYNTAX);
opts.addOption(OptionBuilder.create());
opts.addOption(null, LONG_OPT_MPEG, false, OPT_MPEG_DESC);
opts.addOption(null, LONG_OPT_NO_APPN, false, OPT_NO_APPN_DESC);
opts.addOption("h", "help", false, OPT_HELP_DESC);
opts.addOption("V", "version", false, OPT_VERSION_DESC);
CommandLine cl = null;
try {
cl = new PosixParser().parse(opts, args);
} catch (ParseException e) {
exit("jpg2dcm: " + e.getMessage());
throw new RuntimeException("unreachable");
}
if (cl.hasOption('V')) {
Package p = Jpg2Dcm.class.getPackage();
System.out.println("jpg2dcm v" + p.getImplementationVersion());
System.exit(0);
}
if (cl.hasOption('h') || cl.getArgList().size() != 2) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp(USAGE, DESCRIPTION, opts, EXAMPLE);
System.exit(0);
}
return cl;
}
private static void exit(String msg) {
System.err.println(msg);
System.err.println("Try 'jpg2dcm -h' for more information.");
System.exit(1);
}
}
Along with Jpg2Dcm.java you can also use the org.dcm4che3.tool.dcm2jpg.Dcm2Jpg.java file from dcm4che-tool-dcm2jpg-3.3.7.jar.
Also include these jars:
commons-cli-1.2.jar
dcm4che-core-2.0.26.jar
log4j-boot.jar
slf4j-api-1.5.0.jar
slf4j-log4j12-1.5.0.jar
Solver:
public jpgdcm(File file, File fileOutput) {
try {
jpgLen = (int) file.length();
BufferedImage jpegImage = ImageIO.read(file);
/*
* We open a try block and read our JPEG into a BufferedImage through the ImageIO.read() method. If this results in an invalid image, we throw a new exception.
* Otherwise, we have a valid image and therefore valuable information about it.
* The BufferedImage class has a lot of useful methods for retrieving image data, so let's save the number of color components (samples per pixel) of our image, the bits
* per pixel and the bits allocated:
*/
if (jpegImage == null) throw new Exception("Invalid file.");
int colorComponents = jpegImage.getColorModel().getNumColorComponents();
int bitsPerPixel = jpegImage.getColorModel().getPixelSize();
int bitsAllocated = (bitsPerPixel / colorComponents);
int samplesPerPixel = colorComponents;
/*It’s time to start building our Dicom dataset:*/
Attributes dicom = new Attributes();
dicom.setString(Tag.SpecificCharacterSet, VR.CS, "ISO_IR 100");
dicom.setString(Tag.PhotometricInterpretation, VR.CS, samplesPerPixel == 3 ? "YBR_FULL_422" : "MONOCHROME2");
/*The first line creates a new basic Dicom object defined by dcm4che2 toolkit.
*The next one puts header information for Specific Character Set: ISO_IR 100 – it’s the same for ISO-8859-1 – the code for Latin alphabet.
*Finally, the last line puts header information for photometric interpretation (read with or without colors).
*So if our image has samples per pixel equals to 3, it has colors (YBR_FULL_422), else it’s a grayscale image (MONOCHROME2).
The following lines add integer values to our Dicom header. Note that all of them comes from BufferedImage methods.
These values are mandatory when encapsulating. For more information you can check Part 3.5 of Dicom Standard. */
dicom.setInt(Tag.SamplesPerPixel, VR.US, samplesPerPixel);
dicom.setInt(Tag.Rows, VR.US, jpegImage.getHeight());
dicom.setInt(Tag.Columns, VR.US, jpegImage.getWidth());
dicom.setInt(Tag.BitsAllocated, VR.US, bitsAllocated);
dicom.setInt(Tag.BitsStored, VR.US, bitsAllocated);
dicom.setInt(Tag.HighBit, VR.US, bitsAllocated-1);
dicom.setInt(Tag.PixelRepresentation, VR.US, 0);
/*Also, our Dicom header needs information about date and time of creation:*/
dicom.setDate(Tag.InstanceCreationDate, VR.DA, new Date());
dicom.setDate(Tag.InstanceCreationTime, VR.TM, new Date());
/* Every Dicom file has a unique identifier.
* Here we’re generating study, series and Sop instances UIDs.
* You may want to modify these values, but you should to care about their uniqueness.
*/
dicom.setString(Tag.StudyInstanceUID, VR.UI, UIDUtils.createUID());
dicom.setString(Tag.SeriesInstanceUID, VR.UI, UIDUtils.createUID());
dicom.setString(Tag.SOPInstanceUID, VR.UI, UIDUtils.createUID());
dicom.setString(Tag.StudyInstanceUID, VR.UI, UIDUtils.createUID());
dicom.setString(Tag.SeriesInstanceUID, VR.UI, UIDUtils.createUID());
dicom.setString(Tag.SOPInstanceUID, VR.UI, UIDUtils.createUID());
/*Our Dicom header is almost done.
* The following command initiates Dicom metafile information considering JPEGBaseline1 as transfer syntax.
* This means this file has Jpeg data encapsulated instead common medical image pixel data.
* The most common Jpeg files use a subset of the Jpeg standard called baseline Jpeg.
* A baseline Jpeg file contains a single image compressed with the baseline discrete cosine transformation (DCT) and Huffman encoding.
*/
// dicom.initFileMetaInformation(UID.JPEGBaseline1);
/*After initiate the header we can open an output stream for saving our Dicom dataset as follows:*/
Attributes fmi = new Attributes();
fmi.setString(Tag.ImplementationVersionName, VR.SH, "DCM4CHE3");
fmi.setString(Tag.ImplementationClassUID, VR.UI, UIDUtils.createUID());
fmi.setString(Tag.TransferSyntaxUID, VR.UI, transferSyntax);
fmi.setString(Tag.MediaStorageSOPClassUID, VR.UI, transferSyntax);
fmi.setString(Tag.MediaStorageSOPInstanceUID, VR.UI,UIDUtils.createUID());
fmi.setString(Tag.FileMetaInformationVersion, VR.OB, "1");
fmi.setInt(Tag.FileMetaInformationGroupLength, VR.UL, dicom.size()+fmi.size());
DicomOutputStream dos = new DicomOutputStream(fileOutput);
dos.writeDataset(fmi, dicom);
dos.writeHeader(Tag.PixelData, VR.OB, -1);
/*
* The Item Tag (FFFE,E000) is followed by a 4 byte item length field encoding the explicit number of bytes of the item.
* The first item in the sequence of items before the encoded pixel data stream shall be a basic item with length equals to zero:
*/
dos.writeHeader(Tag.Item, null, 0);
/*The next Item then keeps the length of our Jpeg file. */
/*
According to Gunter from dcm4che team we have to take care that
the pixel data fragment length containing the JPEG stream has
an even length.
*/
dos.writeHeader(Tag.Item, null, (jpgLen+1)&~1);
/* Now all we have to do is to fill this item with bytes taken from our Jpeg file*/
FileInputStream fis = new FileInputStream(file);
BufferedInputStream bis = new BufferedInputStream(fis);
DataInputStream dis = new DataInputStream(bis);
byte[] buffer = new byte[65536];
int b;
while ((b = dis.read(buffer)) > 0) {
dos.write(buffer, 0, b);
}
/*Finally, the Dicom Standard tells that we have to put a last Tag:
* a Sequence Delimiter Item (FFFE,E0DD) with length equals to zero.*/
if ((jpgLen&1) != 0) dos.write(0);
dos.writeHeader(Tag.SequenceDelimitationItem, null, 0);
dos.close();
} catch (Exception e) {
e.printStackTrace();
}
}

Unable to play audio encoded with XUGGLER

I am using this code to publish audio to a Red5 server and testing it via the publisher demo to see if I am able to hear what I am publishing.
The packets are getting sent successfully, but I am unable to hear any audio. Please help me out; I am not able to figure out the error.
package testx;
import com.xuggle.ferry.IBuffer;
import com.xuggle.xuggler.*;
import com.xuggle.xuggler.IAudioSamples.Format;
import javax.sound.sampled.*;
public class Testx{
private static IContainer outContainer;
private static IContainerFormat outContainerFormat;
private static IStreamCoder outAudioCoder;
private static IStream outAudioStream;
public static void main(String args[])
{
String urlOut ="rtmp://localhost:1935/oflaDemo/xxx";
outContainer = IContainer.make();
outContainerFormat = IContainerFormat.make();
outContainerFormat.setOutputFormat("flv", urlOut, null);
int retVal = outContainer.open(urlOut, IContainer.Type.WRITE, outContainerFormat);
if (retVal < 0) {
System.out.println("Could not open output container");
return;
}
outAudioStream = outContainer.addNewStream(1);
outAudioCoder = outAudioStream.getStreamCoder();
ICodec cdc = ICodec.findEncodingCodec(ICodec.ID.CODEC_ID_AAC);
outAudioCoder.setCodec(cdc);
outAudioCoder.setSampleRate(44100);
outAudioCoder.setChannels(1);
outAudioCoder.setFrameRate(IRational.make(44100,1));
outAudioCoder.setSampleFormat(Format.FMT_S16);
outAudioCoder.setBitRate(64000);
retVal = outAudioCoder.open(null,null);
if (retVal < 0) {
System.out.println("Could not open audio coder");
return;
}
retVal = outContainer.writeHeader();
if (retVal < 0) {
System.out.println("Could not write output FLV header: ");
return ;
}
AudioFormat audioFormat = new AudioFormat(44100,
(int)16,
1,
true, /* xuggler defaults to signed 16 bit samples */
false);
TargetDataLine line = null;
DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat);
if (!AudioSystem.isLineSupported(info)) {
return;
}
// Obtain and open the line.
try {
line = (TargetDataLine) AudioSystem.getLine(info);
line.open(audioFormat);
} catch (LineUnavailableException ex) {
return;
}
// Begin audio capture.
line.start();
long lastPos_out=0;
long audtioTime =0;
long startTime = System.nanoTime();
while(true)
{
byte[] data = new byte[line.getBufferSize()/5];
int sz = line.read(data, 0, data.length);
if(sz>0)
{
long nanoTs = System.nanoTime()-startTime;
IBuffer iBuf = IBuffer.make(null,data,0,sz);
System.out.println(iBuf.toString());
IAudioSamples smp = IAudioSamples.make(iBuf, 1,IAudioSamples.Format.FMT_S16);
smp.setComplete(true,sz/2,44100,1,Format.FMT_S16, nanoTs / 1000);
int samplesConsumed = 0;
while(samplesConsumed<smp.getNumSamples()){
IPacket packet= IPacket.make();
samplesConsumed +=outAudioCoder.encodeAudio(packet, smp, samplesConsumed);
if (packet.isComplete()) {
// packet.setKeyPacket(true);
packet.setPosition(lastPos_out);
packet.setStreamIndex(1);
lastPos_out+=packet.getSize();
outContainer.writePacket(packet);
}
}
}
// try {
// Thread.sleep((long)10000);
//} catch (InterruptedException e) {
// TODO Auto-generated catch block
// e.printStackTrace();
// }
}
}
}
Solved it: my microphone volume was very low, and I set the sample rate to 16 kHz.
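In other words, besides raising the microphone input level, the only code changes were the sample rates, roughly:
// Sketch of the rate change: configure both the encoder and the capture line for 16 kHz.
outAudioCoder.setSampleRate(16000);
outAudioCoder.setFrameRate(IRational.make(16000, 1));
AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false);
// ...and pass the matching rate when completing each sample buffer:
smp.setComplete(true, sz / 2, 16000, 1, Format.FMT_S16, nanoTs / 1000);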
