Libspeexjni JNI integration - Java

These are the Java functions in question:
targetFunctionPtr1 is public static native long nInitSpxEncoder();
targetFunctionPtr is public static native long nEncodeBuffer(long j12, short[] sArr, byte[] bArr, long j13);
Sample pseudo-Java code:
long nInitSpxEncoder = LibSpeex.nInitSpxEncoder();
long nEncodeBuffer = LibSpeex.nEncodeBuffer(nInitSpxEncoder, sArr, bArr, 65536);
They are called like this below via JNI:
char b[16] = {0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,
              0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f,0x00};
jstring jinput = env->NewStringUTF(b);
jclass stringClass = env->FindClass("java/lang/String");
jmethodID getBytesMId = env->GetMethodID(stringClass, "getBytes", "()[B");
jbyteArray in1 = (jbyteArray) env->CallObjectMethod(jinput, getBytesMId);

jlong init = targetFunctionPtr1(env, CallerObj1);   // nInitSpxEncoder

jshort outCArray[] = {100};
jshortArray retval = env->NewShortArray(10000);     // allocate
env->SetShortArrayRegion(retval, 0, 1, outCArray);

jlong nenc = targetFunctionPtr(env, CallerObj, init, retval, in1, 10); // nEncodeBuffer
I don't get why the above segfaults. The first call, init, works, but the second call, nEncodeBuffer, segfaults. In the debugger I can see it load the byte array and the short array, but I cannot figure out why it does not return; it just loops until the segfault. I call it via JNI and would expect targetFunctionPtr to return, but it seems to loop somehow until the segmentation fault occurs.
This is a sample implementation of the JNI calls in Java:
long encoder = LibSpeex.nInitSpxEncoder();
if (encoder != 0) {
    int frameSize = (int) LibSpeex.nGetEncoderFrameSize(encoder);
    System.err.println(String.format("Frame size = %d", frameSize));
    short[] pcm = new short[frameSize];
    AudioRecord lastSource = null;
    try {
        for (long frame = 0; frame < this.f13733d; frame++) {
            AudioRecord audioRecord;
            synchronized (this) {
                if (lastSource != this.f13731b) {
                    Log.i("AudioPttRecorder", "source changed");
                }
                audioRecord = this.f13731b;
            }
            lastSource = audioRecord;
            // read exactly one frame of PCM samples from the recorder
            if (audioRecord == null || audioRecord.read(pcm, 0, frameSize) != frameSize) {
                break;
            }
            if (this.f13730a != null) {
                this.f13730a.a(frame, a(frameSize, pcm));
            }
            // bArr is the 64 KiB output buffer the encoder writes the packet into
            long packetLen = LibSpeex.nEncodeBuffer(encoder, pcm, bArr, 65536);
            if (packetLen < 0) {
                throw new RuntimeException("Something wrong with encoding Speex");
            }
            Log.d("AudioPttRecorder", String.format("Write packet len=%d", packetLen));
            // 2-byte little-endian length header, then the packet itself
            bArr2[0] = (byte) (packetLen & 255);
            bArr2[1] = (byte) (packetLen >> 8);
            bufferedOutputStream.write(bArr2, 0, 2);
            bufferedOutputStream.write(bArr, 0, (int) packetLen);
        }
    } finally {
        LibSpeex.nDestroySpxEncoder(encoder);
        Log.i("AudioPttRecorder", "exited");
    }
    bufferedOutputStream.flush();
    Log.d("AudioPttRecorder", "finished");
    return;
}
My code calling these functions directly through JNI does something similar.
Any ideas what is wrong with the native code?
How should I call LibSpeex to encode a byte array in native code?
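For what it's worth, the native snippet does not respect the Java contract shown above: nEncodeBuffer takes one frame of input PCM in the short array and writes the encoded packet into the byte array, whose capacity is passed as the last argument (65536 in the Java sample). The snippet instead passes a 10000-element short array, a tiny string-derived byte array as the output buffer, and a length of 10, so the encoder can read garbage samples and write far past the end of in1, which fits the observed looping and eventual segfault. Below is a minimal sketch of a call that matches the contract, assuming a frame-size native is exported like the others (targetFunctionPtrFrameSize is a hypothetical pointer to nGetEncoderFrameSize):
#include <vector>

jlong enc = targetFunctionPtr1(env, CallerObj1);                    // nInitSpxEncoder
jlong frameSize = targetFunctionPtrFrameSize(env, CallerObj, enc);  // nGetEncoderFrameSize (assumed)

// Input: exactly one frame of PCM samples (silence here; real code fills it).
std::vector<jshort> samples((size_t) frameSize, 0);
jshortArray pcm = env->NewShortArray((jsize) frameSize);
env->SetShortArrayRegion(pcm, 0, (jsize) frameSize, samples.data());

// Output: a buffer large enough for the encoded packet, with its capacity
// passed as the last argument, exactly as the Java sample does.
jbyteArray out = env->NewByteArray(65536);

jlong packetLen = targetFunctionPtr(env, CallerObj, enc, pcm, out, 65536); // nEncodeBuffer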

Related

Passing string variable to native method

I'm trying to pass two strings to a native method. The method doesn't execute when I pass the strings like this:
String pathDir=request.getParameter("path");
String user=request.getParameter("user");
obj.modifyACL(pathDir, user, 'a', 1);
The same method executes fine when I pass the strings like this:
obj.modifyACL("C:/Users/margie/Desktop/tb.h", "SYSTEM", 'r', 2);
The values of pathDir and user are not empty when read from the HTML elements. I used
pw.println(user+pathDir);
and it prints this:
SYSTEM C:\Users\margie\Desktop\tb.h
This is the native method:
PACL SetPerm(LPCTSTR file, string user, char val, int perm, PACL pOldDACL)
{
EXPLICIT_ACCESS eas[1];
PACL pacl = 0;
DWORD rc;
long long int access_val;
LPSTR use = const_cast<char *>(user.c_str());
if(val=='R'||val=='r')
access_val=0x80000000; //GENERIC_READ
if(val=='W'||val=='w')
access_val=0x40000000; //GENERIC_WRITE
if(val=='A'||val=='a')
access_val=0x10000000; //GENERIC_ALL
if(perm==1)
{
eas[0].grfAccessPermissions = access_val;
eas[0].grfAccessMode = GRANT_ACCESS;
eas[0].grfInheritance = NO_INHERITANCE;
eas[0].Trustee.TrusteeForm = TRUSTEE_IS_NAME;
eas[0].Trustee.TrusteeType = TRUSTEE_IS_WELL_KNOWN_GROUP;
eas[0].Trustee.ptstrName = use;
}
else if(perm==2)
{
eas[0].grfAccessPermissions = access_val;
eas[0].grfAccessMode = DENY_ACCESS;
eas[0].grfInheritance = NO_INHERITANCE;
eas[0].Trustee.TrusteeForm = TRUSTEE_IS_NAME;
eas[0].Trustee.TrusteeType = TRUSTEE_IS_WELL_KNOWN_GROUP;
eas[0].Trustee.ptstrName = use;
}
rc = SetEntriesInAcl(1, eas, pOldDACL, &pacl);
if (rc != ERROR_SUCCESS)
{
printf("ERROR---------SetEntriesInAcl: %u\n", rc);
return NULL;
}
rc = SetNamedSecurityInfo((LPSTR)file, SE_FILE_OBJECT,
DACL_SECURITY_INFORMATION | PROTECTED_DACL_SECURITY_INFORMATION,
NULL, NULL, pacl, NULL);
if (rc != ERROR_SUCCESS)
{
printf("ERROR---------SetNamedSecurityInfo: %u\n", rc);
return NULL;
}
cout << "----------------PERMISSION GRANTED----------------\n";
return pacl;
}
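Two things in SetPerm are worth fixing regardless of the string issue (my observation, not from the original post): eas[0] is never zero-initialized, so the EXPLICIT_ACCESS fields that are not assigned (pMultipleTrustee, MultipleTrusteeOperation) hold garbage when SetEntriesInAcl reads the struct, and access_val stays uninitialized when val matches none of the three cases. A minimal fix at the top of the function:
EXPLICIT_ACCESS eas[1];
ZeroMemory(eas, sizeof(eas));  // every field defined before SetEntriesInAcl reads the struct
long long int access_val = 0;  // defined even if 'val' matches no case below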
JNIEXPORT void JNICALL Java_total_modifyACL(JNIEnv *env, jobject thisObj, jstring inJNIStr, jstring inStr, jchar chr, jint num)
{
cout<<"Inside modifyACL()\n";
PSID pSidOwner = NULL;
DWORD dwRtnCode = 0, dwAcctName = 1, dwDomainName = 1;
HANDLE hFile;
PSECURITY_DESCRIPTOR pSD = NULL;
PACL pOldDACL = NULL;
char ch;
char tp=(char)chr;
int val,i,aceNum;
int perm=(int)num;
const char *input = env->GetStringUTFChars(inJNIStr, NULL);
const char *user = env->GetStringUTFChars(inStr, NULL);
// Get the handle of the file object.
hFile = CreateFile(
input,
GENERIC_READ,
FILE_SHARE_READ,
NULL,
OPEN_EXISTING,
FILE_ATTRIBUTE_NORMAL,
NULL);
// Check GetLastError for CreateFile error code.
if (hFile == INVALID_HANDLE_VALUE) {
DWORD dwErrorCode = 0;
dwErrorCode = GetLastError();
cout << "CreateFile error = " << dwErrorCode<<". Possibly NO file exist in the given path or READ Access denied.";
exit;
}
// Get the SID of the file.
dwRtnCode = GetSecurityInfo(
hFile,
SE_FILE_OBJECT,
DACL_SECURITY_INFORMATION | OWNER_SECURITY_INFORMATION,
&pSidOwner,
NULL,
&pOldDACL,
NULL,
&pSD);
// Check GetLastError for GetSecurityInfo error condition.
if (dwRtnCode != ERROR_SUCCESS) {
DWORD dwErrorCode = 0;
dwErrorCode = GetLastError();
cout << "GetSecurityInfo error = " << dwErrorCode;
}
pOldDACL = SetPerm(input,user,tp,perm,pOldDACL); //passing pOldDACL to add the ACE to the existing ACEs.
}
What should I do to make modifyACL() work when passing user-input strings?
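A guess at the cause, plus general JNI hygiene (a sketch, not a confirmed fix): user+pathDir is concatenated with no separator, yet the printed line shows a space between SYSTEM and the path, which suggests the form values arrive with stray whitespace; a trustee lookup fails on "SYSTEM " where it succeeds on "SYSTEM", so trimming on the Java side (user.trim(), pathDir.trim()) is the first thing to try. Note also that the bare exit; after the CreateFile failure is a no-op, so execution continues with an invalid handle. On the native side, strings obtained with GetStringUTFChars should be null-checked and released:
const char *input = env->GetStringUTFChars(inJNIStr, NULL);
const char *user  = env->GetStringUTFChars(inStr, NULL);
if (input == NULL || user == NULL) {
    return;  // OutOfMemoryError has already been thrown by the JVM
}

// ... CreateFile / GetSecurityInfo / SetPerm exactly as above ...

CloseHandle(hFile);                           // don't leak the file handle
env->ReleaseStringUTFChars(inJNIStr, input);  // release the JNI string copies
env->ReleaseStringUTFChars(inStr, user);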

Signal 7 (SIGBUS), code 1 (BUS_ADRALN), fault addr

I'm trying to do stitching on Android. I used the SURF algorithm as the feature finder and got an error like "try to enable OPENCV_ENABLE_NONFREE". I use C++ native code in Android Studio and do the stitching in the background. I solved the SURF error, but now, when I test the application on a real device, I get this error:
I did some research on another forum, and they say the problem may be caused by a function that doesn't return the value it's supposed to return. Can anyone help me, please? This is the code:
private Handler handler = new Handler(new Handler.Callback() {
@Override
public boolean handleMessage(Message message) {
switch (message.what) {
case HANDLER_START_STITCHING :
{
new Thread()
{
public void run()
{
AsyncTask.execute(new Runnable() {
@Override
public void run() {
String[] source=getDirectoryFilelist("null");
final File resultDir = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + File.separator + "viewwer");
// if (!resultDir.exists())
// resultDir.mkdir();
final String stitchingResultImagePath = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath()) +"/result.jpg"; // + (ANGLE-90) + ".jpg";
if( NativePanorama.jniStitching(source, stitchingResultImagePath, STITCH_IMAGE_SCALE) == 0 )
{
handler.sendMessage(handler.obtainMessage(HANDLER_SET_STITCHING_BUTTON_TEXT,"Stitching success"));
File image = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath());
File result_90 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result90.jpg");
File result_180 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result180.jpg");
File result_270 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result270.jpg");
File result_360 = new File(getApplicationContext().getExternalFilesDir(null).getAbsolutePath() + "/result360.jpg");
Log.d("GESTION_STITCHING", result_180.toString());
/*if (ANGLE == 450) {
handler.sendMessage(handler.obtainMessage(HANDLER_FINISH_STITCHING,"Stitching success"));
}*/
if (image.exists()) {
File[] files = image.listFiles();
for (int i=0;i<files.length; i++) {
if (files[i].compareTo(result_90) == 0 || files[i].compareTo(result_180) == 0 || files[i].compareTo(result_270) == 0 || files[i].compareTo(result_360) == 0) {
} else {
files[i].delete();
}
}
}
}
else
{
handler.sendMessage(handler.obtainMessage(HANDLER_SET_STITCHING_BUTTON_TEXT,"Stitching error"));
}
}
});
}
}.start();
break;
}
and the following is the native C++ code:
JNIEXPORT jint JNICALL Java_com_priscilla_viewwer_utils_NativePanorama_jniStitching(JNIEnv *env, jclass clazz, jobjectArray source, jstring result, jdouble scale) {
clock_t beginTime, endTime;
double timeSpent;
beginTime = clock();
//init jni call java method
int i = 0;
bool try_use_gpu = true;
vector<Mat> imgs;
Mat img;
Mat img_scaled;
Mat pano;
Mat pano_tocut;
Mat gray;
const char* result_name = env->GetStringUTFChars(result, JNI_FALSE); //convert result
LOGE("result_name=%s",result_name);
LOGE("scale=%f",scale);
int imgCount = env->GetArrayLength(source); //img count
LOGE("source imgCount=%d",imgCount);
for(i=0;i<imgCount;i++)
{
jstring jsource = (jstring)(env->GetObjectArrayElement(source, i));
const char* source_name = env->GetStringUTFChars(jsource, JNI_FALSE); //convert jsource
LOGE("Add index %d source_name=:%s", i, source_name);
img=imread(source_name);
Size dsize = Size((int)(img.cols*scale),(int)(img.rows*scale));
img_scaled = Mat(dsize,CV_32S);
resize(img,img_scaled,dsize);
imgs.push_back(img_scaled);
env->ReleaseStringUTFChars(jsource, source_name); //release convert jsource
}
img.release();
pano = stitchingImages(imgs);
for(i=0;i<imgs.size();i++)
{
imgs[i].release();
}
//cut black edges
//LOGE("stitching success,cutting black....");
pano_tocut = pano;
cvtColor(pano_tocut, gray, CV_BGR2GRAY);
Rect startROI(0,0,gray.cols,gray.rows); // start as the source image - ROI is the complete SRC-Image
cropLargestPossibleROI(gray,pano_tocut,startROI);
gray.release();
imwrite(result_name, pano_tocut);
pano.release();
pano_tocut.release();
env->ReleaseStringUTFChars(result, result_name); //release convert result
endTime = clock();
timeSpent = (double)(endTime - beginTime) / CLOCKS_PER_SEC;
LOGE("success,total cost time %f seconds",timeSpent);
// env->CallVoidMethod(clazz, javaMethodRef, timeSpent);
return 0;
}
I came across this issue too, and I found it was a strict-aliasing (and alignment) violation caused by a cast: BUS_ADRALN is an alignment fault, and dereferencing a uint32_t* that points at unaligned bytes crashes on ARM.
Problem:
uint32_t i32 = *((uint32_t*)m_data);
Solution:
uint32_t i32 = 0;
char* p = (char*)&i32;
for(int i =0;i < 4;i++)
{
p[i] = m_data[i];
}
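Equivalent and a bit shorter (a standard alternative, assuming m_data points at at least four valid bytes): memcpy reads a possibly-unaligned value without violating strict aliasing, and compilers optimize the fixed-size copy to a plain load where the platform allows it.
uint32_t i32;
memcpy(&i32, m_data, sizeof(i32));  // safe for an unaligned source, no aliasing UB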

How to measure performance/speed of Files.copy method while the process is on-going?

I have a general method which takes an input stream (it can come from a network socket or from a file on local storage) and saves the data to disk.
Below is a small snippet of the function:
fun saveToFile(data: InputStream, fileDestination: File) {
val bytesWritten = Files.copy(data, fileDestination.toPath(), StandardCopyOption.REPLACE_EXISTING)
println("$bytesWritten bytes were saved at ${fileDestination.absolutePath}")
}
Is it possible to measure the speed/rate at which the data is being saved to disk while the method is running? For example, is there a way to invoke a function which returns the rate/speed, or which updates an object that holds that data?
If I were doing the implementation myself with InputStream/OutputStream, I could have something like this:
fun saveData(data: InputStream, fileDestination: File, measureSpeed : (Statistics) -> Unit = { }) {
val outputStream = fileDestination.outputStream()
val maxBufferSize = 1024
val totalAmountData = data.available()
var totalBytesWritten = 0
var bytesWriteNextIteration: Int // amount of bytes that will be sent in only one write call
val statistics = Statistics(amountSent = 0, lastWriteBytes = 0, lastWriteTime = 1)
while (totalBytesWritten < totalAmountData) {
bytesWriteNextIteration = totalAmountData - totalBytesWritten
if (bytesWriteNextIteration > maxBufferSize) {
bytesWriteNextIteration = maxBufferSize
}
val bytes = ByteArray(bytesWriteNextIteration)
data.read(bytes) // fill the buffer from the input stream
val nano = measureNanoTime {
outputStream.write(bytes)
}
statistics.amountSent = totalBytesWritten.toLong()
statistics.lastWriteBytes = bytesWriteNextIteration.toLong()
statistics.lastWriteTime = nano
measureSpeed(statistics)
totalBytesWritten += bytesWriteNextIteration
}
outputStream.flush()
outputStream.close()
}
data class Statistics(var amountSent: Long, var lastWriteBytes: Long, var lastWriteTime: Long)
and a measureSpeed callback to calculate the copy/transfer rate.
Since I didn't find anything built-in, the easiest way to do what's being asked is to "overload" the desired Files.copy method and call that function instead.
The overloading method could be similar to the below:
private val BUFFER_SIZE = 8192
@Throws(IOException::class)
private fun copy(source: InputStream, sink: OutputStream, networkStatistics: NetworkStatistics, measureSpeed : (NetworkStatistics) -> Unit = { }): Long {
var nread = 0L
val buf = ByteArray(BUFFER_SIZE)
var n = source.read(buf)
while (n > 0) {
val written = n // bytes belonging to this iteration's write
val nano = measureNanoTime {
sink.write(buf, 0, written)
}
nread += written.toLong()
networkStatistics.amountSent = nread
networkStatistics.lastPacketBytes = written.toLong()
networkStatistics.lastPacketTime = nano
measureSpeed(networkStatistics)
n = source.read(buf)
}
return nread
}
@Throws(IOException::class)
fun copy(`in`: InputStream, target: Path, networkStatistics: NetworkStatistics, measureSpeed : (NetworkStatistics) -> Unit = { }, vararg options: CopyOption ): Long {
// ensure not null before opening file
Objects.requireNonNull(`in`)
// check for REPLACE_EXISTING
var replaceExisting = false
for (opt in options) {
if (opt === StandardCopyOption.REPLACE_EXISTING) {
replaceExisting = true
} else {
if (opt == null) {
throw NullPointerException("options contains 'null'")
} else {
throw UnsupportedOperationException(opt.toString() + " not supported")
}
}
}
// attempt to delete an existing file
var se: SecurityException? = null
if (replaceExisting) {
try {
Files.deleteIfExists(target)
} catch (x: SecurityException) {
se = x
}
}
// attempt to create target file. If it fails with
// FileAlreadyExistsException then it may be because the security
// manager prevented us from deleting the file, in which case we just
// throw the SecurityException.
val ostream: OutputStream
try {
ostream = Files.newOutputStream(target, StandardOpenOption.CREATE_NEW,
StandardOpenOption.WRITE)
} catch (x: FileAlreadyExistsException) {
if (se != null)
throw se
// someone else won the race and created the file
throw x
}
// do the copy
ostream.use { out -> return copy(`in`, out, networkStatistics, measureSpeed = { networkStatistics -> measureSpeed(networkStatistics) }) }
}
and it would be called as:
val statistics = NetworkStatistics(responseShouldBe, 0, 0, 1)
copy(inputStream, file.toPath(), statistics, { it: NetworkStatistics -> measureSpeed(it) }, StandardCopyOption.REPLACE_EXISTING)
private fun measureSpeed(stats: NetworkStatistics) {
    val bytes = stats.lastPacketBytes
    val nanos = stats.lastPacketTime
    val milliseconds = nanos.toDouble() / 1_000_000  // 1 ms = 1,000,000 ns
    val seconds = milliseconds / 1000
    println("$bytes bytes per ($seconds seconds) or ($milliseconds millisecs) or ($nanos nanosecs) -- ${(bytes.toDouble() / (1024 * 1024)) / seconds} MB/second")
}

Audio merging using OpenSL ES Android

I am trying to record my vocals and then merge them with an audio file using the OpenSL ES library. I found a GitHub sample called Native-Audio. It merges the two audio tracks, but the background audio file plays much faster than its actual rate in the final output.
Please use headphones to notice the difference.
Sample links: Before and After
Also, it only uses files from the assets folder. How can I manually select MP3 files from the file manager?
private void mixAudio(){
try {
if (!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) ||
!(ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED))
{
// Show rationale and request permission.
ActivityCompat.requestPermissions(this,
new String[]{android.Manifest.permission.READ_EXTERNAL_STORAGE, android.Manifest.permission.WRITE_EXTERNAL_STORAGE},
1000);
}
else {
buttonMix.setEnabled(false);
buttonMix.setText("MIXING....");
textViewMixPath.setText("");
buttonPlay.setEnabled(false);
buttonRecord.setEnabled(false);
buttonStart.setEnabled(false);
listView.setEnabled(false);
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try{
//final File file = new File(getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + "mix.wav");
//String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
//File file = new File(baseDir + "/mix.wav");
String path = Environment.getExternalStorageDirectory().getPath() + "/VocalRecorder";
File fileParent = new File(path);
if (!fileParent.exists()){
fileParent.mkdir();
}
final File file = new File(fileParent.getPath() + "/mix.wav");
//String author = getApplicationContext().getPackageName() + ".provider";
//Uri videoUri = FileProvider.get(this, author, mediaFile);
//final File file = new File(getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + "mix.wav");
//MediaMuxer muxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
String beat = beats[selectedBeat];
//beat = beat.replace(".wav", ".mp3");
AssetFileDescriptor afd = getAssets().openFd(beat);
MediaCodec codec = null;
//ByteBuffer outputBuffer;
//short[] data; // data for the AudioTrack playback
//int outputBufferIndex = -1;
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
final long duration = Long.parseLong(durationStr);
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
// right now I am pointing to a URI but I have tested that both will
// play the media file using MediaPlayer
int sampleRate = 0;
int numChannels = 0;
int dstIndex = -1;
int numTracks = extractor.getTrackCount(); //This says 1
for (int i = 0; i < numTracks; ++i) { // so this will just run once
MediaFormat format = extractor.getTrackFormat(i); // getting info so it looks good so far
String mime = format.getString(MediaFormat.KEY_MIME); // "audio/mpeg"
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
codec = MediaCodec.createDecoderByType(mime);
codec.configure(format, null, null, 0);
//format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AMR_NB);
//dstIndex = muxer.addTrack(format);
//writer.setFrameRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
//writer.setSamplesPerFrame(format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
//writer.setBitsPerSample(16);
sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
break;
}
}
// Calculate the number of frames required for specified duration
long numFrames = (long)(duration * sampleRate/1000);
// Create a wav file with the name specified as the first argument
WavFile wavFile = WavFile.newWavFile(file, numChannels, numFrames, 16, sampleRate);
if (codec == null) {
throw new IllegalArgumentException("No decoder for file format");
}
//ByteBuffer[] inputBuffers = decoder.getInputBuffers();
//ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
/*
Boolean eosReceived = false;
while (!eosReceived) {
int inIndex = decoder.dequeueInputBuffer(1000);
if (inIndex >= 0) {
ByteBuffer buffer = decoder.getInputBuffer(inIndex);
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// We shouldn't stop the playback at this point, just pass the EOS
// flag to mDecoder, we will get it again from the
// dequeueOutputBuffer
Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
int outIndex = decoder.dequeueOutputBuffer(info, 1000);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
//outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = decoder.getOutputFormat();
Log.d("DecodeActivity", "New format " + format);
//audioTrack.setPlaybackRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer outBuffer = decoder.getOutputBuffer(outIndex);
Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outBuffer);
final byte[] chunk = new byte[info.size];
outBuffer.get(chunk); // Read the buffer all at once
outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
//audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
decoder.releaseOutputBuffer(outIndex, false);
break;
}
// All decoded frames have been rendered, we can stop playing now
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}
}
}
*/
short recordedData[] = recordedData();
int recordMixStartIndex = -1;
//muxer.start();
codec.start();
Boolean sawInputEOS = false;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo infoMux = new MediaCodec.BufferInfo();
int count = 0;
while (!sawInputEOS) {
int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_US);
Log.i(LOG_TAG, "inputBufIndex : " + inputBufIndex);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codec.getInputBuffer(inputBufIndex);
int sampleSize = extractor.readSampleData(dstBuf, 0);
Log.i(LOG_TAG, "sampleSize : " + sampleSize);
long presentationTimeUs = 0;
if (sampleSize < 0) {
Log.i(LOG_TAG, "Saw input end of stream!");
sawInputEOS = true;
sampleSize = 0;
} else {
presentationTimeUs = extractor.getSampleTime();
Log.i(LOG_TAG, "presentationTimeUs " + presentationTimeUs);
}
codec.queueInputBuffer(inputBufIndex,
0, //offset
sampleSize,
presentationTimeUs,
sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
if (!sawInputEOS) {
Log.i(LOG_TAG, "extractor.advance()");
extractor.advance();
}
}
final int res = codec.dequeueOutputBuffer(info, TIMEOUT_US);
if (res >= 0) {
int outputBufIndex = res;
ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
//final byte[] chunk = new byte[info.size];
//buf.get(chunk); // Read the buffer all at once
short[] shortArray = new short[info.size/2];
buf.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shortArray);
buf.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
if (shortArray.length > 0) {
//mAudioTrack.write(chunk, 0, chunk.length);
//infoMux.presentationTimeUs = info.presentationTimeUs;
//infoMux.flags = info.flags;
//muxer.writeSampleData(dstIndex, ByteBuffer.wrap(chunk),
// infoMux);
long []longData = new long[shortArray.length];
// Merge data with vocal
// Calculate the time
final long bufferTimer = info.presentationTimeUs/1000;
int vocalCount = 0;
for (int i = 0; i < shortArray.length; i ++) {
//writer.writeShortLittle(shortArray[i]);
long offsetTime = i*1000/(sampleRate*2); // 2 channels
Boolean mixed = false;
if ((offsetTime + bufferTimer > recordStartTime) && (offsetTime + bufferTimer <= recordStopTime + 500)){
if (recordMixStartIndex == -1){
recordMixStartIndex = 0;
}
if (recordMixStartIndex < recordedData.length){
//Log.i("TAG", "############ mix record data: " + recordMixStartIndex);
longData[i] = TPMixSamples((int)(recordedData[recordMixStartIndex]), (int)shortArray[i]/3);
if (vocalCount >= 3) {
recordMixStartIndex++;
vocalCount = 0;
}
else{
vocalCount ++;
}
mixed = true;
}
}
else {
// All done, set sawInputEOS to stop mixing
if (bufferTimer > recordStopTime + 500){
sawInputEOS = true;
}
}
if (!mixed) {
longData[i] = shortArray[i];
}
}
Log.i("TAG", "############ write frames: " + longData.length/2);
wavFile.writeFrames(longData, longData.length/2);
count ++;
if (count % 5 == 0){
runOnUiThread(new Runnable() {
@Override
public void run() {
long percent = bufferTimer*100/duration;
buttonMix.setText("MIXING..." + percent + "%");
}
});
}
}
codec.releaseOutputBuffer(outputBufIndex, false /* render */);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawInputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
//codecOutputBuffers = codec.getOutputBuffers();
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
final MediaFormat oformat = codec.getOutputFormat();
Log.d(LOG_TAG, "Output format has changed to " + oformat);
//mAudioTrack.setPlaybackRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
}
}
// Close the wavFile
wavFile.close();
// muxer.stop();
// muxer.release();
codec.stop();
codec.release();
extractor.release();
runOnUiThread(new Runnable() {
@Override
public void run() {
buttonMix.setText("MIX DONE");
buttonPlay.setEnabled(true);
buttonRecord.setEnabled(true);
textViewMixPath.setText(file.getPath());
buttonStart.setEnabled(true);
listView.setEnabled(true);
}
});
}
catch (Exception e){
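// note: silently swallowing this exception hides any decode/mix failure; log e at minimum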
}
}
});
thread.start();
}
}
catch (Exception e){
e.printStackTrace();
}
}
private final int INT16_MIN = -32768;
private final int INT16_MAX = 32767;
private long TPMixSamples(int a, int b) {
    // clamp the vocal sample to the 16-bit range
    if (a > INT16_MAX) { a = INT16_MAX; }
    if (a < INT16_MIN) { a = INT16_MIN; }
    // If both samples are negative, the mixed signal must have an amplitude between the lesser of A and B and the minimum permissible negative amplitude
    if (a < 0 && b < 0) {
        return (a + b) - ((a * b) / INT16_MIN);
    }
    // If both samples are positive, the mixed signal must have an amplitude between the greater of A and B and the maximum permissible positive amplitude
    if (a > 0 && b > 0) {
        return (a + b) - ((a * b) / INT16_MAX);
    }
    // If the samples are on opposite sides of the zero crossing, they partly cancel each other out, so a plain sum reflects that
    return a + b;
}
/** Native methods, implemented in jni folder */
public static native void createEngine();
public static native void createBufferQueueAudioPlayer(int sampleRate, int samplesPerBuf);
/////
public static native boolean createAssetAudioPlayer(AssetManager assetManager, String filename);
// true == PLAYING, false == PAUSED
public static native void setPlayingAssetAudioPlayer(boolean isPlaying);
public static native int getDurationAssetAudioPlayer();
public static native int getCurrentPositionAssetAudioPlayer();
//////
public static native boolean createUriAudioPlayer(String uri);
public static native void setPlayingUriAudioPlayer(boolean isPlaying);
public static native void setLoopingUriAudioPlayer(boolean isLooping);
public static native void setChannelMuteUriAudioPlayer(int chan, boolean mute);
public static native void setChannelSoloUriAudioPlayer(int chan, boolean solo);
public static native int getNumChannelsUriAudioPlayer();
public static native void setVolumeUriAudioPlayer(int millibel);
public static native void setMuteUriAudioPlayer(boolean mute);
public static native void enableStereoPositionUriAudioPlayer(boolean enable);
public static native void setStereoPositionUriAudioPlayer(int permille);
public static native boolean selectClip(int which, int count);
public static native void stopClip();
public static native boolean enableReverb(boolean enabled);
public static native boolean createAudioRecorder();
public static native void startRecording();
public static native void stopRecording();
public static native void pauseRecording();
public static native void resumeRecording();
public static native short[] recordedData();
public static native double recordedDuration();
public static native void shutdown();
/** Load jni .so on initialization */
static {
System.loadLibrary("native-audio-jni");
}

Custom OpenVR driver, with jittery android rotation

I'm working on a simple (I thought) OpenVR driver that sends the compositor's output to my Android phone to display with Google Cardboard. I've got the basics working, but the rotation data my driver receives from the Android device is very noisy; even with the phone sitting still on my desk it jitters a lot.
The floats are sent over in network byte order.
Here is the code receiving the rotation data:
float BytesToFloat(unsigned char* buffer, int start)
{
float f = (buffer[start] << 24) | (buffer[start + 1] << 16) | (buffer[start + 2] << 8) | buffer[start + 3];
return f;
}
void MobileDeviceReceiver::PoseThread(std::function<void(MobileVector*)> PoseUpdateCallback)
{
sockaddr_in client;
int inLen,clientStructLen = sizeof(client);
MobileVector currentPose;
DriverLog("started pose tracking thread %d\n",m_isActive);
bool errorLastCall = false;
char pchBuffer[65535];
while (m_isActive)
{
if ((inLen = recvfrom(m_socket, pchBuffer, 65535, 0, (sockaddr*)&client, &clientStructLen)) == SOCKET_ERROR)
{
if (errorLastCall == false)
{
DriverLog("Error receiving data: %d\n", WSAGetLastError());
}
errorLastCall = true;
}
else {
errorLastCall = false;
}
currentPose.x = floorf(BytesToFloat((unsigned char*)pchBuffer, 0)*10000)/10000;
currentPose.y = 0;//BytesToFloat((unsigned char*)pchBuffer, 4);
currentPose.z = 0;//BytesToFloat((unsigned char*)pchBuffer, 8);
PoseUpdateCallback(&currentPose);
}
}
And the code sending it from the phone:
class Task implements Runnable
{
public Queue<DatagramPacket> packets = new LinkedList<DatagramPacket>();
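// note (observation): this queue is accessed from two threads; a ConcurrentLinkedQueue
// or BlockingQueue would be safer than an unsynchronized LinkedList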
private boolean isActive = true;
@Override
public void run() {
try{
DatagramSocket socket = new DatagramSocket();
while(isActive)
{
if(packets.size() > 0)
{
socket.send(packets.remove());
}
}
}catch(Exception e)
{
e.printStackTrace();
}
}
public void Kill()
{
isActive = false;
}
}
ByteBuffer buffer = ByteBuffer.allocate(12);
protected float ALPHA = 0.05f;
private float[] lowPassFilter(float[] input,float[] output)
{
if(output == null) return input;
for ( int i=0; i<input.length; i++ ) {
output[i] = output[i] + ALPHA * (input[i] - output[i]);
}
return output;
}
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
if(sensorEvent.sensor == sRotation)
{
float x,y,z;
x = (float) Math.toRadians(sensorEvent.values[0]);
y = (float) Math.toRadians(sensorEvent.values[1]);
z = (float) Math.toRadians(sensorEvent.values[2]);
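// note (observation): passing a fresh "new float[3]" as the output below means the
// filter keeps no state between sensor events, so it merely scales each reading by
// ALPHA; persisting the previous output array across calls gives real smoothing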
float[] values = lowPassFilter(new float[]{x,y,z},new float[3]);
buffer.order(ByteOrder.BIG_ENDIAN);
buffer.putFloat(values[0]);
buffer.putFloat(values[1]);
buffer.putFloat(values[2]);
try {
DatagramPacket packet = new DatagramPacket(buffer.array(), 12, InetAddress.getByName(IP), 8888);
if(task != null) {
task.packets.add(packet);
}
}catch(Exception e)
{
e.printStackTrace();
}
buffer.clear();
}
}
Thanks in advance
For future reference, I was doing two things wrong:
I was sending the float in network byte order and not rearranging it to native byte order on the local machine.
Bit shifts don't work for reassembling floats: ORing the bytes together builds an integer that is then converted to float rather than reinterpreted bit-for-bit, so I used memcpy instead.
After changing these two things it all worked perfectly.
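A minimal sketch of the corrected receive-side conversion, assuming the phone writes the IEEE-754 bits big-endian (ByteBuffer's order in the sender code above): ntohl swaps to host byte order, and memcpy reinterprets the bits without aliasing or alignment problems.
#include <cstdint>
#include <cstring>
#include <winsock2.h>  // ntohl on Windows; use <arpa/inet.h> elsewhere

float BytesToFloat(const unsigned char* buffer, int start)
{
    uint32_t bits;
    std::memcpy(&bits, buffer + start, sizeof(bits));  // gather the 4 raw bytes
    bits = ntohl(bits);                                // network -> host byte order
    float f;
    std::memcpy(&f, &bits, sizeof(f));                 // reinterpret the bits as a float
    return f;
}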
