Add experimental reference frame invalidation support

Author: Cameron Gutman
Date:   2015-08-11 21:12:34 -07:00
parent 36c320a584
commit c19ff71c9a
6 changed files with 95 additions and 32 deletions

ConnectionContext.java

@@ -4,6 +4,8 @@ import java.net.InetAddress;
 import javax.crypto.SecretKey;
 
+import com.limelight.nvstream.av.video.VideoDecoderRenderer;
+
 public class ConnectionContext {
     // Gen 3 servers are 2.1.1 - 2.2.1
     public static final int SERVER_GENERATION_3 = 3;
@@ -13,6 +15,7 @@ public class ConnectionContext {
     public InetAddress serverAddress;
     public StreamConfiguration streamConfig;
+    public VideoDecoderRenderer videoDecoderRenderer;
     public NvConnectionListener connListener;
 
     public SecretKey riKey;
     public int riKeyId;

NvConnection.java

@@ -41,7 +41,6 @@ public class NvConnection {
     // Start parameters
     private int drFlags;
     private Object videoRenderTarget;
-    private VideoDecoderRenderer videoDecoderRenderer;
     private AudioRenderer audioRenderer;
 
     public NvConnection(String host, String uniqueId, NvConnectionListener listener, StreamConfiguration config, LimelightCryptoProvider cryptoProvider)
@@ -246,7 +245,7 @@ public class NvConnection {
     private boolean startVideoStream() throws IOException
     {
         videoStream = new VideoStream(context, controlStream);
-        return videoStream.startVideoStream(videoDecoderRenderer, videoRenderTarget, drFlags);
+        return videoStream.startVideoStream(videoRenderTarget, drFlags);
     }
 
     private boolean startAudioStream() throws IOException
@@ -329,7 +328,7 @@ public class NvConnection {
         this.drFlags = drFlags;
         this.audioRenderer = audioRenderer;
         this.videoRenderTarget = videoRenderTarget;
-        this.videoDecoderRenderer = videoDecoderRenderer;
+        this.context.videoDecoderRenderer = videoDecoderRenderer;
 
         new Thread(new Runnable() {
             public void run() {

VideoDecoderRenderer.java

@@ -14,6 +14,10 @@ public abstract class VideoDecoderRenderer {
     // Allows decode units to be submitted directly from the receive thread
     public static final int CAPABILITY_DIRECT_SUBMIT = 0x2;
 
+    // !!! EXPERIMENTAL !!!
+    // Allows reference frame invalidation to be used to recover from packet loss
+    public static final int CAPABILITY_REFERENCE_FRAME_INVALIDATION = 0x4;
+
     public int getCapabilities() {
         return 0;
     }
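A decoder that can resume decoding from a non-IDR frame once the server has invalidated the lost references opts in by setting the new bit in getCapabilities(). A minimal sketch of that opt-in (the subclass name is hypothetical and the renderer's other abstract methods are assumed to be implemented elsewhere; this is not part of the commit):

// Hypothetical renderer subclass; only the capability override is shown.
public class RfiCapableRenderer extends VideoDecoderRenderer {
    @Override
    public int getCapabilities() {
        // Opt into direct submit and the experimental reference frame invalidation
        return CAPABILITY_DIRECT_SUBMIT | CAPABILITY_REFERENCE_FRAME_INVALIDATION;
    }

    // Remaining VideoDecoderRenderer methods (setup, start, stop, release,
    // decode unit submission) omitted; they are unrelated to this change.
}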

VideoDepacketizer.java

@@ -1,6 +1,7 @@
 package com.limelight.nvstream.av.video;
 
 import com.limelight.LimeLog;
+import com.limelight.nvstream.ConnectionContext;
 import com.limelight.nvstream.av.ByteBufferDescriptor;
 import com.limelight.nvstream.av.DecodeUnit;
 import com.limelight.nvstream.av.ConnectionStatusListener;
@@ -26,6 +27,7 @@ public class VideoDepacketizer {
     private boolean waitingForIdrFrame = true;
     private long frameStartTime;
     private boolean decodingFrame;
+    private boolean strictIdrFrameWait;
 
     // Cached objects
     private ByteBufferDescriptor cachedReassemblyDesc = new ByteBufferDescriptor(null, 0, 0);
@@ -40,11 +42,23 @@ public class VideoDepacketizer {
     private static final int DU_LIMIT = 15;
     private AbstractPopulatedBufferList<DecodeUnit> decodedUnits;
 
-    public VideoDepacketizer(ConnectionStatusListener controlListener, int nominalPacketSize, boolean unsynchronized)
+    public VideoDepacketizer(ConnectionContext context, ConnectionStatusListener controlListener, int nominalPacketSize)
     {
         this.controlListener = controlListener;
         this.nominalPacketDataLength = nominalPacketSize - VideoPacket.HEADER_SIZE;
 
+        boolean unsynchronized;
+        if (context.videoDecoderRenderer != null) {
+            int videoCaps = context.videoDecoderRenderer.getCapabilities();
+            this.strictIdrFrameWait = (videoCaps & VideoDecoderRenderer.CAPABILITY_REFERENCE_FRAME_INVALIDATION) == 0;
+            unsynchronized = (videoCaps & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) != 0;
+        }
+        else {
+            // If there's no renderer, it doesn't matter if we synchronize or wait for IDRs
+            this.strictIdrFrameWait = false;
+            unsynchronized = true;
+        }
+
         AbstractPopulatedBufferList.BufferFactory factory = new AbstractPopulatedBufferList.BufferFactory() {
             public Object createFreeBuffer() {
                 return new DecodeUnit();
@@ -71,7 +85,10 @@ public class VideoDepacketizer {
     private void dropAvcFrameState()
     {
+        // We'll need an IDR frame now if we're in strict mode
+        if (strictIdrFrameWait) {
             waitingForIdrFrame = true;
+        }
 
         // Count the number of consecutive frames dropped
         consecutiveFrameDrops++;
@@ -84,7 +101,7 @@ public class VideoDepacketizer {
             // Restart the count
             consecutiveFrameDrops = 0;
 
-            // Request an IDR frame
+            // Request an IDR frame (0 tuple always generates an IDR frame)
             controlListener.connectionDetectedFrameLoss(0, 0);
         }
     }
@@ -128,7 +145,9 @@ public class VideoDepacketizer {
             LimeLog.warning("Video decoder is too slow! Forced to drop decode units");
 
             // Invalidate all frames from the start of the DU queue
+            // (0 tuple always generates an IDR frame)
             controlListener.connectionSinkTooSlow(0, 0);
+            waitingForIdrFrame = true;
 
             // Remove existing frames
             decodedUnits.clearPopulatedObjects();
@@ -330,7 +349,6 @@ public class VideoDepacketizer {
             // Unexpected start of next frame before terminating the last
             waitingForNextSuccessfulFrame = true;
-            waitingForIdrFrame = true;
 
             // Clear the old state and wait for an IDR
             dropAvcFrameState();
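The new constructor logic above is where the capability bits take effect: reference frame invalidation support turns off the strict wait for an IDR frame after a dropped frame, while direct submit continues to select the unsynchronized path. A standalone sketch of that derivation, with the constant values inlined from VideoDecoderRenderer (illustrative only, not part of the commit):

public class CapabilityFlagsDemo {
    // Values copied from VideoDecoderRenderer in this commit
    static final int CAPABILITY_DIRECT_SUBMIT = 0x2;
    static final int CAPABILITY_REFERENCE_FRAME_INVALIDATION = 0x4;

    public static void main(String[] args) {
        // A renderer advertising both direct submit and reference frame invalidation
        int videoCaps = CAPABILITY_DIRECT_SUBMIT | CAPABILITY_REFERENCE_FRAME_INVALIDATION;

        // Mirrors the new VideoDepacketizer constructor logic: strict IDR waiting
        // is only needed when the renderer cannot handle invalidation
        boolean strictIdrFrameWait = (videoCaps & CAPABILITY_REFERENCE_FRAME_INVALIDATION) == 0;
        boolean unsynchronized = (videoCaps & CAPABILITY_DIRECT_SUBMIT) != 0;

        System.out.println("strictIdrFrameWait = " + strictIdrFrameWait); // false
        System.out.println("unsynchronized = " + unsynchronized);         // true
    }
}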

VideoStream.java

@@ -118,8 +118,7 @@ public class VideoStream {
     public boolean setupDecoderRenderer(VideoDecoderRenderer decRend, Object renderTarget, int drFlags) {
         this.decRend = decRend;
 
-        depacketizer = new VideoDepacketizer(avConnListener, context.streamConfig.getMaxPacketSize(),
-                decRend != null && (decRend.getCapabilities() & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) != 0);
+        depacketizer = new VideoDepacketizer(context, avConnListener, context.streamConfig.getMaxPacketSize());
 
         if (decRend != null) {
             try {
@@ -143,10 +142,10 @@ public class VideoStream {
         return true;
     }
 
-    public boolean startVideoStream(VideoDecoderRenderer decRend, Object renderTarget, int drFlags) throws IOException
+    public boolean startVideoStream(Object renderTarget, int drFlags) throws IOException
     {
         // Setup the decoder and renderer
-        if (!setupDecoderRenderer(decRend, renderTarget, drFlags)) {
+        if (!setupDecoderRenderer(context.videoDecoderRenderer, renderTarget, drFlags)) {
             // Nothing to cleanup here
             throw new IOException("Video decoder failed to initialize. Please restart your device and try again.");
         }
@@ -154,7 +153,7 @@ public class VideoStream {
         // Open RTP sockets and start session
         setupRtpSession();
 
-        if (decRend != null) {
+        if (this.decRend != null) {
            // Start the receive thread early to avoid missing
            // early packets that are part of the IDR frame
            startReceiveThread();

ControlStream.java

@@ -12,6 +12,7 @@ import java.util.concurrent.LinkedBlockingQueue;
 import com.limelight.LimeLog;
 import com.limelight.nvstream.ConnectionContext;
 import com.limelight.nvstream.av.ConnectionStatusListener;
+import com.limelight.nvstream.av.video.VideoDecoderRenderer;
 
 public class ControlStream implements ConnectionStatusListener {
@@ -101,6 +102,7 @@ public class ControlStream implements ConnectionStatusListener {
     private Thread resyncThread;
     private LinkedBlockingQueue<int[]> invalidReferenceFrameTuples = new LinkedBlockingQueue<int[]>();
     private boolean aborting = false;
+    private boolean forceIdrRequest;
 
     private final short[] packetTypes;
     private final short[] payloadLengths;
@@ -124,6 +126,11 @@ public class ControlStream implements ConnectionStatusListener {
             preconstructedPayloads = precontructedPayloadsGen4;
             break;
         }
+
+        if (context.videoDecoderRenderer != null) {
+            forceIdrRequest = (context.videoDecoderRenderer.getCapabilities() &
+                    VideoDecoderRenderer.CAPABILITY_REFERENCE_FRAME_INVALIDATION) == 0;
+        }
     }
 
     public void initialize() throws IOException
@@ -239,6 +246,7 @@ public class ControlStream implements ConnectionStatusListener {
                 while (!isInterrupted())
                 {
                     int[] tuple;
+                    boolean idrFrameRequired = false;
 
                     // Wait for a tuple
                     try {
@@ -248,17 +256,36 @@ public class ControlStream implements ConnectionStatusListener {
                         return;
                     }
 
-                    // Aggregate all lost frames into one range
+                    // Check for the magic IDR frame tuple
                     int[] lastTuple = null;
+                    if (tuple[0] != 0 || tuple[1] != 0) {
+                        // Aggregate all lost frames into one range
                         for (;;) {
                             int[] nextTuple = lastTuple = invalidReferenceFrameTuples.poll();
                             if (nextTuple == null) {
                                 break;
                             }
+
+                            // Check if this tuple has IDR frame magic values
+                            if (nextTuple[0] == 0 && nextTuple[1] == 0) {
+                                // We will need an IDR frame now, but we won't break out
+                                // of the loop because we want to dequeue all pending requests
+                                idrFrameRequired = true;
+                            }
 
                             lastTuple = nextTuple;
                         }
+                    }
+                    else {
+                        // We must require an IDR frame
+                        idrFrameRequired = true;
+                    }
 
+                    try {
+                        if (forceIdrRequest || idrFrameRequired) {
+                            requestIdrFrame();
+                        }
+                        else {
                             // The server expects this to be the firstLostFrame + 1
                             tuple[0]++;
@@ -267,10 +294,8 @@ public class ControlStream implements ConnectionStatusListener {
                                 tuple[1] = lastTuple[1];
                             }
 
-                    try {
-                        LimeLog.warning("Invalidating reference frames from "+tuple[0]+" to "+tuple[1]);
-                        ControlStream.this.invalidateReferenceFrames(tuple[0], tuple[1]);
-                        LimeLog.warning("Frames invalidated");
+                            invalidateReferenceFrames(tuple[0], tuple[1]);
+                        }
                     } catch (IOException e) {
                         context.connListener.connectionTerminated(e);
                         return;
@@ -310,9 +335,7 @@ public class ControlStream implements ConnectionStatusListener {
         }
     }
 
-    private void invalidateReferenceFrames(int firstLostFrame, int nextSuccessfulFrame) throws IOException
-    {
-        // We can't handle a real reference frame invalidation yet.
+    private void requestIdrFrame() throws IOException {
         // On Gen 3, we use the invalidate reference frames trick which works for about 5 hours of streaming at 60 FPS
         // On Gen 4+, we use the known IDR frame request packet
@@ -333,6 +356,23 @@ public class ControlStream implements ConnectionStatusListener {
                     (short) preconstructedPayloads[IDX_REQUEST_IDR_FRAME].length,
                     preconstructedPayloads[IDX_REQUEST_IDR_FRAME]));
         }
+
+        LimeLog.warning("IDR frame request sent");
+    }
+
+    private void invalidateReferenceFrames(int firstLostFrame, int nextSuccessfulFrame) throws IOException {
+        LimeLog.warning("Invalidating reference frames from "+firstLostFrame+" to "+nextSuccessfulFrame);
+
+        ByteBuffer conf = ByteBuffer.wrap(new byte[payloadLengths[IDX_INVALIDATE_REF_FRAMES]]).order(ByteOrder.LITTLE_ENDIAN);
+
+        conf.putLong(firstLostFrame);
+        conf.putLong(nextSuccessfulFrame);
+        conf.putLong(0);
+
+        sendAndGetReply(new NvCtlPacket(packetTypes[IDX_INVALIDATE_REF_FRAMES],
+                payloadLengths[IDX_INVALIDATE_REF_FRAMES], conf.array()));
+
+        LimeLog.warning("Reference frame invalidation sent");
     }
 static class NvCtlPacket {
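Two conventions from the new resync path are worth spelling out: a (0, 0) tuple queued by the depacketizer is the magic "request an IDR frame" signal, while any other (firstLostFrame, nextSuccessfulFrame) range is sent to the server as an invalidate-reference-frames message of three little-endian 64-bit fields, as built in invalidateReferenceFrames() above. A standalone sketch of that payload layout (the 24-byte size is inferred from the three putLong calls; the real size comes from payloadLengths[IDX_INVALIDATE_REF_FRAMES], and the frame numbers here are illustrative):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class InvalidateRefFramesPayloadDemo {
    public static void main(String[] args) {
        int firstLostFrame = 120;       // illustrative values, not from the commit
        int nextSuccessfulFrame = 125;

        // Same layout the new invalidateReferenceFrames() builds:
        // firstLostFrame, nextSuccessfulFrame, and a trailing zero, all little-endian longs
        ByteBuffer conf = ByteBuffer.wrap(new byte[24]).order(ByteOrder.LITTLE_ENDIAN);
        conf.putLong(firstLostFrame);
        conf.putLong(nextSuccessfulFrame);
        conf.putLong(0);

        System.out.println("payload length = " + conf.array().length + " bytes"); // 24
    }
}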