Use byte buffer for audio to minimize buffer copies

This commit is contained in:
Iwan Timmer
2014-01-07 20:43:14 +01:00
parent 815e56d7d8
commit 85ffdc2426
5 changed files with 43 additions and 85 deletions

View File

@@ -37,8 +37,8 @@ int nv_opus_get_channel_count(void) {
} }
// This number assumes 2 channels at 48 KHz // This number assumes 2 channels at 48 KHz
int nv_opus_get_max_out_shorts(void) { int nv_opus_get_max_out_bytes(void) {
return 512*nv_opus_get_channel_count(); return 1024*nv_opus_get_channel_count();
} }
// The Opus stream is 48 KHz // The Opus stream is 48 KHz
@@ -46,17 +46,20 @@ int nv_opus_get_sample_rate(void) {
return 48000; return 48000;
} }
// outpcmdata must be 5760*2 shorts in length // outpcmdata must be 11520*2 bytes in length
// packets must be decoded in order // packets must be decoded in order
// a packet loss must call this function with NULL indata and 0 inlen // a packet loss must call this function with NULL indata and 0 inlen
// returns the number of decoded samples // returns the number of decoded samples
int nv_opus_decode(unsigned char* indata, int inlen, short* outpcmdata) { int nv_opus_decode(unsigned char* indata, int inlen, unsigned char* outpcmdata) {
int err; int err;
// Decoding to 16-bit PCM with FEC off // Decoding to 16-bit PCM with FEC off
// Maximum length assuming 48KHz sample rate // Maximum length assuming 48KHz sample rate
err = opus_decode(decoder, indata, inlen, err = opus_decode(decoder, indata, inlen,
outpcmdata, 512, 0); (opus_int16*) outpcmdata, 512, 0);
if (err>0)
err = err * 2;
return err; return err;
} }

View File

@@ -1,6 +1,6 @@
int nv_opus_init(void); int nv_opus_init(void);
void nv_opus_destroy(void); void nv_opus_destroy(void);
int nv_opus_get_channel_count(void); int nv_opus_get_channel_count(void);
int nv_opus_get_max_out_shorts(void); int nv_opus_get_max_out_bytes(void);
int nv_opus_get_sample_rate(void); int nv_opus_get_sample_rate(void);
int nv_opus_decode(unsigned char* indata, int inlen, short* outpcmdata); int nv_opus_decode(unsigned char* indata, int inlen, unsigned char* outpcmdata);

View File

@@ -25,8 +25,8 @@ Java_com_limelight_nvstream_av_audio_OpusDecoder_getChannelCount(JNIEnv *env, jo
// This number assumes 2 channels at 48 KHz // This number assumes 2 channels at 48 KHz
JNIEXPORT jint JNICALL JNIEXPORT jint JNICALL
Java_com_limelight_nvstream_av_audio_OpusDecoder_getMaxOutputShorts(JNIEnv *env, jobject this) { Java_com_limelight_nvstream_av_audio_OpusDecoder_getMaxOutputBytes(JNIEnv *env, jobject this) {
return nv_opus_get_max_out_shorts(); return nv_opus_get_max_out_bytes();
} }
// The Opus stream is 48 KHz // The Opus stream is 48 KHz
@@ -43,13 +43,13 @@ JNIEXPORT jint JNICALL
Java_com_limelight_nvstream_av_audio_OpusDecoder_decode( Java_com_limelight_nvstream_av_audio_OpusDecoder_decode(
JNIEnv *env, jobject this, // JNI parameters JNIEnv *env, jobject this, // JNI parameters
jbyteArray indata, jint inoff, jint inlen, // Input parameters jbyteArray indata, jint inoff, jint inlen, // Input parameters
jshortArray outpcmdata) // Output parameter jbyteArray outpcmdata) // Output parameter
{ {
jint ret; jint ret;
jbyte* jni_input_data; jbyte* jni_input_data;
jshort* jni_pcm_data; jbyte* jni_pcm_data;
jni_pcm_data = (*env)->GetShortArrayElements(env, outpcmdata, 0); jni_pcm_data = (*env)->GetByteArrayElements(env, outpcmdata, 0);
if (indata != NULL) { if (indata != NULL) {
jni_input_data = (*env)->GetByteArrayElements(env, indata, 0); jni_input_data = (*env)->GetByteArrayElements(env, indata, 0);
@@ -62,7 +62,7 @@ Java_com_limelight_nvstream_av_audio_OpusDecoder_decode(
ret = nv_opus_decode(NULL, 0, jni_pcm_data); ret = nv_opus_decode(NULL, 0, jni_pcm_data);
} }
(*env)->ReleaseShortArrayElements(env, outpcmdata, jni_pcm_data, 0); (*env)->ReleaseByteArrayElements(env, outpcmdata, jni_pcm_data, 0);
return ret; return ret;
} }

View File

@@ -6,8 +6,10 @@ import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException; import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine; import javax.sound.sampled.SourceDataLine;
import com.limelight.nvstream.av.ShortBufferDescriptor; import com.limelight.nvstream.av.ByteBufferDescriptor;
import com.limelight.nvstream.av.audio.AudioRenderer; import com.limelight.nvstream.av.audio.AudioRenderer;
import java.nio.ByteOrder;
import java.util.LinkedList;
/** /**
* Audio renderer implementation * Audio renderer implementation
@@ -16,7 +18,7 @@ import com.limelight.nvstream.av.audio.AudioRenderer;
public class JavaxAudioRenderer implements AudioRenderer { public class JavaxAudioRenderer implements AudioRenderer {
private SourceDataLine soundLine; private SourceDataLine soundLine;
private SoundBuffer soundBuffer; private LinkedList<ByteBufferDescriptor> soundBuffer;
private byte[] lineBuffer; private byte[] lineBuffer;
private int channelCount; private int channelCount;
private int sampleRate; private int sampleRate;
@@ -33,16 +35,25 @@ public class JavaxAudioRenderer implements AudioRenderer {
* @param length the length of data to be rendered * @param length the length of data to be rendered
*/ */
@Override @Override
public void playDecodedAudio(short[] pcmData, int offset, int length) { public void playDecodedAudio(byte[] pcmData, int offset, int length) {
if (soundLine != null) { if (soundLine != null) {
// Queue the decoded samples into the staging sound buffer // Queue the decoded samples into the staging sound buffer
soundBuffer.queue(new ShortBufferDescriptor(pcmData, offset, length)); if (soundBuffer.size() > STAGING_BUFFERS) {
soundBuffer.removeFirst();
}
soundBuffer.addLast(new ByteBufferDescriptor(pcmData, offset, length));
int available = soundLine.available(); int available = soundLine.available();
if (reallocateLines) { if (reallocateLines) {
// Kinda jank. If the queued is larger than available, we are going to have a delay // Kinda jank. If the queued is larger than available, we are going to have a delay
// so we increase the buffer size // so we increase the buffer size
if (available < soundBuffer.size()) { int size = 0;
for (ByteBufferDescriptor desc : soundBuffer) {
size += desc.length;
}
if (available < size) {
System.out.println("buffer too full, buffer size: " + soundLine.getBufferSize()); System.out.println("buffer too full, buffer size: " + soundLine.getBufferSize());
int currentBuffer = soundLine.getBufferSize(); int currentBuffer = soundLine.getBufferSize();
soundLine.close(); soundLine.close();
@@ -60,11 +71,16 @@ public class JavaxAudioRenderer implements AudioRenderer {
// If there's space available in the sound line, pull some data out // If there's space available in the sound line, pull some data out
// of the staging buffer and write it to the sound line // of the staging buffer and write it to the sound line
if (available > 0) {
int written = soundBuffer.fill(lineBuffer, 0, available); while (available > 0 && !soundBuffer.isEmpty()) {
if (written > 0) { ByteBufferDescriptor buff = soundBuffer.peek();
soundLine.write(lineBuffer, 0, written); if (buff.length > available) {
break;
} }
available -= soundLine.write(buff.data, buff.offset, buff.length);
soundBuffer.remove();
} }
} }
} }
@@ -80,7 +96,7 @@ public class JavaxAudioRenderer implements AudioRenderer {
} }
private void createSoundLine(int bufferSize) { private void createSoundLine(int bufferSize) {
AudioFormat audioFormat = new AudioFormat(sampleRate, 16, channelCount, true, true); AudioFormat audioFormat = new AudioFormat(sampleRate, 16, channelCount, true, ByteOrder.nativeOrder()==ByteOrder.BIG_ENDIAN);
DataLine.Info info; DataLine.Info info;
@@ -103,7 +119,7 @@ public class JavaxAudioRenderer implements AudioRenderer {
soundLine.start(); soundLine.start();
lineBuffer = new byte[soundLine.getBufferSize()]; lineBuffer = new byte[soundLine.getBufferSize()];
soundBuffer = new SoundBuffer(STAGING_BUFFERS); soundBuffer = new LinkedList<ByteBufferDescriptor>();
} catch (LineUnavailableException e) { } catch (LineUnavailableException e) {
soundLine = null; soundLine = null;
} }

View File

@@ -1,61 +0,0 @@
package com.limelight.binding.audio;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.LinkedList;
import com.limelight.nvstream.av.ShortBufferDescriptor;
public class SoundBuffer {
private LinkedList<ShortBufferDescriptor> bufferList;
private int maxBuffers;
public SoundBuffer(int maxBuffers) {
this.bufferList = new LinkedList<ShortBufferDescriptor>();
this.maxBuffers = maxBuffers;
}
public void queue(ShortBufferDescriptor buff) {
if (bufferList.size() > maxBuffers) {
bufferList.removeFirst();
}
bufferList.addLast(buff);
}
public int size() {
int size = 0;
for (ShortBufferDescriptor desc : bufferList) {
size += desc.length;
}
return size;
}
public int fill(byte[] data, int offset, int length) {
int filled = 0;
// Convert offset and length to be relative to shorts
offset /= 2;
length /= 2;
ShortBuffer sb = ByteBuffer.wrap(data).asShortBuffer();
sb.position(offset);
while (length > 0 && !bufferList.isEmpty()) {
ShortBufferDescriptor buff = bufferList.getFirst();
if (buff.length > length) {
break;
}
sb.put(buff.data, buff.offset, buff.length);
length -= buff.length;
filled += buff.length;
bufferList.removeFirst();
}
// Return bytes instead of shorts
return filled * 2;
}
}