Drop audio frames if the backlog becomes too large. Trim buffer pools when memory runs low. Optimize AVC decoding a bit more.

Cameron Gutman 2013-11-10 15:25:00 -05:00
parent fc66caf567
commit 524cab4115
8 changed files with 80 additions and 18 deletions
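The audio-backlog change in the depacketizer diffs below swaps an unbounded LinkedBlockingQueue for a bounded one and a blocking add() for a non-blocking offer(), recycling the buffer whenever the queue is full. A minimal standalone sketch of that pattern (class and method names here are illustrative, not from the diff):

import java.util.concurrent.LinkedBlockingQueue;

public class BoundedFrameQueue {
    // Bounded to 15 entries, mirroring the AvAudioDepacketizer change below
    private final LinkedBlockingQueue<short[]> decodedUnits =
            new LinkedBlockingQueue<short[]>(15);

    // offer() returns false instead of blocking when the queue is full, so a
    // slow consumer drops frames rather than growing the backlog without bound
    public boolean submit(short[] pcmData) {
        return decodedUnits.offer(pcmData);
    }

    public short[] take() throws InterruptedException {
        return decodedUnits.take();
    }
}

The caller is then responsible for returning a rejected buffer to its pool, exactly as the depacketizer diffs below do with pool.free().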

Game.java

@@ -4,6 +4,7 @@ import com.limelight.nvstream.NvConnection;
import com.limelight.nvstream.input.NvControllerPacket;

import android.app.Activity;
+import android.content.ComponentCallbacks2;
import android.os.Bundle;
import android.view.InputDevice;
import android.view.KeyEvent;
@@ -68,11 +69,21 @@ public class Game extends Activity implements OnGenericMotionListener, OnTouchListener {
        super.onPause();
    }

    @Override
    public void onDestroy() {
        conn.stop();
        super.onDestroy();
    }

+   @Override
+   public void onTrimMemory(int trimLevel) {
+       if (trimLevel >= ComponentCallbacks2.TRIM_MEMORY_RUNNING_LOW)
+       {
+           System.out.println("Trimming for level: "+trimLevel);
+           conn.trim();
+       }
+   }
+
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
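For context on the onTrimMemory() check added above: the ComponentCallbacks2 trim levels are plain int constants ordered by severity, so a single >= comparison fires for RUNNING_LOW and for every more severe level as well. A small sketch (constant values as documented in the Android SDK):

import android.content.ComponentCallbacks2;

public class TrimLevels {
    // TRIM_MEMORY_RUNNING_MODERATE (5) < TRIM_MEMORY_RUNNING_LOW (10)
    // < TRIM_MEMORY_RUNNING_CRITICAL (15) < TRIM_MEMORY_UI_HIDDEN (20)
    // < TRIM_MEMORY_BACKGROUND (40) < TRIM_MEMORY_MODERATE (60)
    // < TRIM_MEMORY_COMPLETE (80)
    public static boolean shouldTrim(int trimLevel) {
        return trimLevel >= ComponentCallbacks2.TRIM_MEMORY_RUNNING_LOW;
    }
}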

NvAudioStream.java

@@ -105,6 +105,11 @@ public class NvAudioStream {
        session.addParticipant(new Participant(host, RTP_PORT, 0));
    }

+   public void trim()
+   {
+       depacketizer.trim();
+   }
+
    private void setupAudio()
    {
        int channelConfig;

NvConnection.java

@@ -79,6 +79,12 @@ public class NvConnection {
            inputStream = null;
        }
    }

+   public void trim()
+   {
+       videoStream.trim();
+       audioStream.trim();
+   }
+
    public void start()
    {

NvVideoStream.java

@@ -20,6 +20,7 @@ import com.limelight.nvstream.av.video.AvVideoPacket;
import jlibrtp.Participant;
import jlibrtp.RTPSession;

+import android.content.ComponentCallbacks2;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
@@ -78,6 +79,11 @@ public class NvVideoStream {
        threads.clear();
    }

+   public void trim()
+   {
+       depacketizer.trim();
+   }
+
    private InputStream openFirstFrameInputStream(String host) throws UnknownHostException, IOException
    {
        Socket s = new Socket(host, FIRST_FRAME_PORT);
@@ -122,7 +128,6 @@
    public void setupDecoders(Surface surface)
    {
        videoDecoder = MediaCodec.createDecoderByType("video/avc");
-       //videoDecoder = MediaCodec.createByCodecName("OMX.google.h264.decoder");

        MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
        videoDecoder.configure(videoFormat, surface, null, 0);
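A note on the line deleted above: createByCodecName("OMX.google.h264.decoder") pins Android's software H.264 decoder, while createDecoderByType() lets the platform pick its preferred decoder for the MIME type, typically a hardware codec. A hedged sketch of the surviving path (resolution hard-coded to the 1280x720 stream, as in the diff):

import java.io.IOException;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

public class DecoderFactory {
    public static MediaCodec createAvcDecoder(Surface surface) throws IOException {
        // The platform chooses the decoder for the type (usually hardware-backed)
        MediaCodec decoder = MediaCodec.createDecoderByType("video/avc");
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
        // Last flag is 0 because we are configuring a decoder, not an encoder
        decoder.configure(format, surface, null, 0);
        return decoder;
    }
}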

AvByteBufferPool.java

@@ -11,6 +11,11 @@ public class AvByteBufferPool {
        this.bufferSize = size;
    }

+   public synchronized void purge()
+   {
+       this.bufferList = new LinkedList<byte[]>();
+   }
+
    public synchronized byte[] allocate()
    {
        if (bufferList.isEmpty())

AvShortBufferPool.java

@@ -11,6 +11,11 @@ public class AvShortBufferPool {
        this.bufferSize = size;
    }

+   public synchronized void purge()
+   {
+       this.bufferList = new LinkedList<short[]>();
+   }
+
    public synchronized short[] allocate()
    {
        if (bufferList.isEmpty())
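Both pool diffs above have the same shape, so one sketch covers them. Only purge() and the top of allocate() appear in the diffs; the rest below is a plausible reconstruction, not the repository's exact code. purge() simply drops the free list, which makes the cached arrays unreachable and therefore reclaimable by the garbage collector:

import java.util.LinkedList;

public class BufferPool {
    private LinkedList<byte[]> bufferList = new LinkedList<byte[]>();
    private final int bufferSize;

    public BufferPool(int size) {
        this.bufferSize = size;
    }

    // Under memory pressure, forget the cached buffers; the GC reclaims them
    public synchronized void purge() {
        this.bufferList = new LinkedList<byte[]>();
    }

    // Reuse a pooled buffer when available, otherwise allocate a fresh one
    public synchronized byte[] allocate() {
        if (bufferList.isEmpty()) {
            return new byte[bufferSize];
        }
        return bufferList.removeFirst();
    }

    // Return a buffer for reuse
    public synchronized void free(byte[] buffer) {
        bufferList.addFirst(buffer);
    }
}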

AvAudioDepacketizer.java

@@ -9,13 +9,18 @@ import com.limelight.nvstream.av.AvShortBufferPool;
public class AvAudioDepacketizer {
    private LinkedBlockingQueue<AvShortBufferDescriptor> decodedUnits =
-           new LinkedBlockingQueue<AvShortBufferDescriptor>();
+           new LinkedBlockingQueue<AvShortBufferDescriptor>(15);

-   private AvShortBufferPool pool = new AvShortBufferPool(OpusDecoder.getMaxOutputShorts());
+   private AvShortBufferPool pool = new AvShortBufferPool(512);

    // Sequencing state
    private short lastSequenceNumber;

+   public void trim()
+   {
+       pool.purge();
+   }
+
    public void decodeInputData(AvRtpPacket packet)
    {
        short seq = packet.getSequenceNumber();
@@ -50,9 +55,13 @@ public class AvAudioDepacketizer {
            decodeLen *= OpusDecoder.getChannelCount();

            // Put it on the decoded queue
-           decodedUnits.add(new AvShortBufferDescriptor(pcmData, 0, decodeLen));
+           if (!decodedUnits.offer(new AvShortBufferDescriptor(pcmData, 0, decodeLen)))
+           {
+               pool.free(pcmData);
+           }
        }
        else {
            System.out.println("decode failed: "+decodeLen);
+           pool.free(pcmData);
        }
    }

AvVideoDepacketizer.java

@@ -29,6 +29,11 @@ public class AvVideoDepacketizer {
        return pool.allocate();
    }

+   public void trim()
+   {
+       pool.purge();
+   }
+
    private void clearAvcNalState()
    {
        if (avcNalDataChain != null)
@@ -114,7 +119,10 @@
            // Construct the H264 decode unit
            AvDecodeUnit du = new AvDecodeUnit(AvDecodeUnit.TYPE_H264, avcNalDataChain, avcNalDataLength, flags);
-           decodedUnits.add(du);
+           if (!decodedUnits.offer(du))
+           {
+               releaseDecodeUnit(du);
+           }

            // Clear old state
            avcNalDataChain = null;
@@ -172,25 +180,33 @@
                // Move to the next special sequence
                while (location.length != 0)
                {
-                   specialSeq = NAL.getSpecialSequenceDescriptor(location);
-
-                   // Check if this should end the current NAL
-                   if (specialSeq != null)
+                   // Catch the easy case first where byte 0 != 0x00
+                   if (location.data[location.offset] == 0x00)
                    {
-                       break;
+                       specialSeq = NAL.getSpecialSequenceDescriptor(location);
+
+                       // Check if this should end the current NAL
+                       if (specialSeq != null)
+                       {
+                           // Only stop if we're decoding something or this
+                           // isn't padding
+                           if (currentlyDecoding != AvDecodeUnit.TYPE_UNKNOWN ||
+                               !NAL.isPadding(specialSeq))
+                           {
+                               break;
+                           }
+                       }
                    }
-
-                   // This byte is part of the NAL data
-                   location.offset++;
-                   location.length--;
+                   else
+                   {
+                       // This byte is part of the NAL data
+                       location.offset++;
+                       location.length--;
+                   }
                }

-               AvByteBufferDescriptor data = new AvByteBufferDescriptor(location.data, start, location.offset-start);
+               if (currentlyDecoding == AvDecodeUnit.TYPE_H264 && avcNalDataChain != null)
+               {
+                   AvByteBufferDescriptor data = new AvByteBufferDescriptor(location.data, start, location.offset-start);

                    // Attach the current packet as the buffer context and increment the refcount
                    data.context = packet;
                    packet.addRef();
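The loop rework above is the "optimize AVC decoding" part of the commit: an Annex B special sequence (00 00 01 or 00 00 00 01) always begins with 0x00, so any other byte can be consumed after a single comparison instead of a full NAL.getSpecialSequenceDescriptor() call. A simplified standalone sketch of that fast path (isStartSequence() is a hypothetical stand-in for the NAL helper, not the repository's code):

public class NalScanner {
    // Returns the offset of the next start sequence, or offset + length if
    // the buffer ends first
    public static int scanToStartSequence(byte[] data, int offset, int length) {
        while (length != 0) {
            // Cheap test first: most payload bytes are not 0x00, so the
            // expensive pattern check rarely runs
            if (data[offset] == 0x00 && isStartSequence(data, offset, length)) {
                break;
            }
            offset++;
            length--;
        }
        return offset;
    }

    // Hypothetical stand-in for NAL.getSpecialSequenceDescriptor():
    // matches 00 00 01 and 00 00 00 01
    private static boolean isStartSequence(byte[] d, int o, int len) {
        if (len >= 3 && d[o] == 0 && d[o + 1] == 0 && d[o + 2] == 1) {
            return true;
        }
        return len >= 4 && d[o] == 0 && d[o + 1] == 0
                && d[o + 2] == 0 && d[o + 3] == 1;
    }
}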