From aa799342e5a28d3e52d82540040275887426ae81 Mon Sep 17 00:00:00 2001
From: Cameron Gutman
Date: Thu, 31 Jul 2014 01:32:15 -0700
Subject: [PATCH] Video stream updates for GFE 2.1.1

---
 .../nvstream/StreamConfiguration.java         |   7 +-
 .../com/limelight/nvstream/av/RtpPacket.java  |  22 +-
 .../nvstream/av/video/VideoDepacketizer.java  | 205 +++++++-----------
 .../nvstream/av/video/VideoPacket.java        |  39 +---
 .../nvstream/av/video/VideoStream.java        |   2 +-
 5 files changed, 113 insertions(+), 162 deletions(-)

diff --git a/moonlight-common/src/com/limelight/nvstream/StreamConfiguration.java b/moonlight-common/src/com/limelight/nvstream/StreamConfiguration.java
index cc610ff5..bc21e28e 100644
--- a/moonlight-common/src/com/limelight/nvstream/StreamConfiguration.java
+++ b/moonlight-common/src/com/limelight/nvstream/StreamConfiguration.java
@@ -5,7 +5,6 @@ public class StreamConfiguration {
     private int width, height;
     private int refreshRate;
     private int bitrate;
-    private int maxPacketSize;
     private boolean sops;
 
     public StreamConfiguration(String app, int width, int height, int refreshRate, int bitrate) {
@@ -14,17 +13,15 @@ public class StreamConfiguration {
         this.height = height;
         this.refreshRate = refreshRate;
         this.bitrate = bitrate;
-        this.maxPacketSize = 1024;
         this.sops = true;
     }
 
-    public StreamConfiguration(String app, int width, int height, int refreshRate, int bitrate, int maxPacketSize, boolean sops) {
+    public StreamConfiguration(String app, int width, int height, int refreshRate, int bitrate, boolean sops) {
        this.app = app;
         this.width = width;
         this.height = height;
         this.refreshRate = refreshRate;
         this.bitrate = bitrate;
-        this.maxPacketSize = maxPacketSize;
         this.sops = sops;
     }
 
@@ -45,7 +42,7 @@ public class StreamConfiguration {
     }
 
     public int getMaxPacketSize() {
-        return maxPacketSize;
+        return 1024;
     }
 
     public String getApp() {
diff --git a/moonlight-common/src/com/limelight/nvstream/av/RtpPacket.java b/moonlight-common/src/com/limelight/nvstream/av/RtpPacket.java
index 0d0d15ed..06757e3c 100644
--- a/moonlight-common/src/com/limelight/nvstream/av/RtpPacket.java
+++ b/moonlight-common/src/com/limelight/nvstream/av/RtpPacket.java
@@ -7,11 +7,16 @@ public class RtpPacket {
 
     private byte packetType;
     private short seqNum;
+    private int headerSize;
 
     private ByteBufferDescriptor buffer;
     private ByteBuffer bb;
 
-    public static final int HEADER_SIZE = 12;
+    public static final int FLAG_EXTENSION = 0x10;
+
+    public static final int FIXED_HEADER_SIZE = 12;
+    public static final int MAX_HEADER_SIZE = 16;
+
 
     public RtpPacket(byte[] buffer)
     {
@@ -21,8 +26,11 @@ public class RtpPacket {
 
     public void initializeWithLength(int length)
     {
-        // Discard the first byte
-        bb.position(1);
+        // Rewind to start
+        bb.rewind();
+
+        // Read the RTP header byte
+        byte header = bb.get();
 
         // Get the packet type
         packetType = bb.get();
@@ -30,6 +38,12 @@ public class RtpPacket {
         // Get the sequence number
         seqNum = bb.getShort();
 
+        // If an extension is present, read the fields
+        headerSize = FIXED_HEADER_SIZE;
+        if ((header & FLAG_EXTENSION) != 0) {
+            headerSize += 4; // 2 additional fields
+        }
+
         // Update descriptor length
         buffer.length = length;
     }
@@ -51,6 +65,6 @@ public class RtpPacket {
 
     public void initializePayloadDescriptor(ByteBufferDescriptor bb)
     {
-        bb.reinitialize(buffer.data, buffer.offset+HEADER_SIZE, buffer.length-HEADER_SIZE);
+        bb.reinitialize(buffer.data, buffer.offset+headerSize, buffer.length-headerSize);
     }
 }
diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java
index f4b84454..42d2c561 100644
--- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java
+++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java
@@ -13,23 +13,21 @@ public class VideoDepacketizer {
     // Current frame state
     private LinkedList<ByteBufferDescriptor> avcFrameDataChain = null;
     private int avcFrameDataLength = 0;
-    private int currentlyDecoding = DecodeUnit.TYPE_UNKNOWN;
 
     // Sequencing state
     private int lastPacketInStream = 0;
     private int nextFrameNumber = 1;
-    private int nextPacketNumber;
     private int startFrameNumber = 1;
     private boolean waitingForNextSuccessfulFrame;
-    private boolean gotNextFrameStart;
     private long frameStartTime;
+    private boolean decodingFrame;
 
     // Cached objects
     private ByteBufferDescriptor cachedReassemblyDesc = new ByteBufferDescriptor(null, 0, 0);
     private ByteBufferDescriptor cachedSpecialDesc = new ByteBufferDescriptor(null, 0, 0);
 
     private ConnectionStatusListener controlListener;
-    private int nominalPacketSize;
+    private final int nominalPacketDataLength;
 
     private static final int DU_LIMIT = 30;
     private PopulatedBufferList<DecodeUnit> decodedUnits;
@@ -37,7 +35,7 @@ public class VideoDepacketizer {
     public VideoDepacketizer(ConnectionStatusListener controlListener, int nominalPacketSize)
     {
         this.controlListener = controlListener;
-        this.nominalPacketSize = nominalPacketSize;
+        this.nominalPacketDataLength = nominalPacketSize - VideoPacket.HEADER_SIZE;
 
         decodedUnits = new PopulatedBufferList<DecodeUnit>(DU_LIMIT, new PopulatedBufferList.BufferFactory() {
             public Object createFreeBuffer() {
@@ -106,6 +104,8 @@ public class VideoDepacketizer {
 
     private void addInputDataSlow(VideoPacket packet, ByteBufferDescriptor location)
     {
+        boolean isDecodingH264 = false;
+
         while (location.length != 0)
         {
             // Remember the start of the NAL data in this packet
@@ -117,11 +117,14 @@ public class VideoDepacketizer {
             if (NAL.isAvcStartSequence(cachedSpecialDesc))
             {
                 // We're decoding H264 now
-                currentlyDecoding = DecodeUnit.TYPE_H264;
-
+                isDecodingH264 = true;
+
                 // Check if it's the end of the last frame
                 if (NAL.isAvcFrameStart(cachedSpecialDesc))
                 {
+                    // Update the global state that we're decoding a new frame
+                    this.decodingFrame = true;
+
                     // Reassemble any pending AVC NAL
                     reassembleAvcFrame(packet.getFrameIndex());
 
@@ -137,14 +140,13 @@ public class VideoDepacketizer {
             else
             {
                 // Check if this is padding after a full AVC frame
-                if (currentlyDecoding == DecodeUnit.TYPE_H264 &&
-                    NAL.isPadding(cachedSpecialDesc)) {
+                if (isDecodingH264 && NAL.isPadding(cachedSpecialDesc)) {
                     // The decode unit is complete
                     reassembleAvcFrame(packet.getFrameIndex());
                 }
 
                 // Not decoding AVC
-                currentlyDecoding = DecodeUnit.TYPE_UNKNOWN;
+                isDecodingH264 = false;
 
                 // Just skip this byte
                 location.length--;
@@ -163,8 +165,7 @@ public class VideoDepacketizer {
             {
                 // Only stop if we're decoding something or this
                 // isn't padding
-                if (currentlyDecoding != DecodeUnit.TYPE_UNKNOWN ||
-                    !NAL.isPadding(cachedSpecialDesc))
+                if (isDecodingH264 || !NAL.isPadding(cachedSpecialDesc))
                 {
                     break;
                 }
@@ -176,7 +177,7 @@ public class VideoDepacketizer {
             location.length--;
         }
 
-        if (currentlyDecoding == DecodeUnit.TYPE_H264 && avcFrameDataChain != null)
+        if (isDecodingH264 && avcFrameDataChain != null)
         {
             ByteBufferDescriptor data = new ByteBufferDescriptor(location.data, start, location.offset-start);
 
@@ -201,106 +202,80 @@ public class VideoDepacketizer {
         avcFrameDataLength += location.length;
     }
 
+    private static boolean isFirstPacket(int flags) {
+        // Clear the picture data flag
+        flags &= ~VideoPacket.FLAG_CONTAINS_PIC_DATA;
+
+        // Check if it's just the start or both start and end of a frame
+        return (flags == (VideoPacket.FLAG_SOF | VideoPacket.FLAG_EOF) ||
+                flags == VideoPacket.FLAG_SOF);
+    }
+
     public void addInputData(VideoPacket packet)
     {
         // Load our reassembly descriptor
         packet.initializePayloadDescriptor(cachedReassemblyDesc);
 
-        // Runt packets get decoded using the slow path
-        // These packets stand alone so there's no need to verify
-        // sequencing before submitting
-        if (cachedReassemblyDesc.length < nominalPacketSize - VideoPacket.HEADER_SIZE) {
-            addInputDataSlow(packet, cachedReassemblyDesc);
-            return;
-        }
+        int flags = packet.getFlags();
 
         int frameIndex = packet.getFrameIndex();
-        int packetIndex = packet.getPacketIndex();
-        int packetsInFrame = packet.getTotalPackets();
+        boolean firstPacket = isFirstPacket(flags);
 
-        // We can use FEC to correct single packet errors
-        // on single packet frames because we just get a
-        // duplicate of the original packet
-        if (packetsInFrame == 1 && packetIndex == 1 &&
-            nextPacketNumber == 0 && frameIndex == nextFrameNumber) {
-            LimeLog.info("Using FEC for error correction");
-            nextPacketNumber = 1;
+        // Look for a frame start before receiving a frame end
+        if (firstPacket && decodingFrame)
+        {
+            LimeLog.warning("Network dropped end of a frame");
+            nextFrameNumber = frameIndex + 1;
+
+            // Unexpected start of next frame before terminating the last
+            waitingForNextSuccessfulFrame = true;
+
+            // Clear the old state and decode this frame
+            clearAvcFrameState();
         }
-        // Discard the rest of the FEC data until we know how to use it
-        else if (packetIndex >= packetsInFrame) {
-            return;
-        }
-
-        // Check that this is the next frame
-        boolean firstPacket = (packet.getFlags() & VideoPacket.FLAG_SOF) != 0;
-        if (frameIndex > nextFrameNumber) {
-            // Nope, but we can still work with it if it's
-            // the start of the next frame
-            if (firstPacket) {
-                LimeLog.warning("Got start of frame "+frameIndex+
-                        " when expecting packet "+nextPacketNumber+
-                        " of frame "+nextFrameNumber);
-                nextFrameNumber = frameIndex;
-                nextPacketNumber = 0;
-                clearAvcFrameState();
-
-                // Tell the encoder when we're done decoding this frame
-                // that we lost some previous frames
-                waitingForNextSuccessfulFrame = true;
-                gotNextFrameStart = false;
-            }
-            else {
-                LimeLog.warning("Got packet "+packetIndex+" of frame "+frameIndex+
-                        " when expecting packet "+nextPacketNumber+
-                        " of frame "+nextFrameNumber);
-                // We dropped the start of this frame too
-                waitingForNextSuccessfulFrame = true;
-                gotNextFrameStart = false;
-
-                // Try to pickup on the next frame
+        // Look for a non-frame start before a frame start
+        else if (!firstPacket && !decodingFrame) {
+            // Check if this looks like a real frame
+            if (flags == VideoPacket.FLAG_CONTAINS_PIC_DATA ||
+                flags == VideoPacket.FLAG_EOF ||
+                cachedReassemblyDesc.length < nominalPacketDataLength)
+            {
+                LimeLog.warning("Network dropped beginning of a frame");
                 nextFrameNumber = frameIndex + 1;
-                nextPacketNumber = 0;
+
+                waitingForNextSuccessfulFrame = true;
                 clearAvcFrameState();
                 return;
             }
-        }
-        else if (frameIndex < nextFrameNumber) {
-            LimeLog.info("Frame "+frameIndex+" is behind our current frame number "+nextFrameNumber);
-            // Discard the frame silently if it's behind our current sequence number
-            return;
-        }
-
-        // We know it's the right frame, now check the packet number
-        if (packetIndex != nextPacketNumber) {
-            LimeLog.warning("Frame "+frameIndex+": expected packet "+nextPacketNumber+" but got "+packetIndex);
-            // At this point, we're guaranteed that it's not FEC data that we lost
-            waitingForNextSuccessfulFrame = true;
-            gotNextFrameStart = false;
-
-            // Skip this frame
-            nextFrameNumber++;
-            nextPacketNumber = 0;
-            clearAvcFrameState();
-            return;
-        }
-
-        if (waitingForNextSuccessfulFrame) {
-            if (!gotNextFrameStart) {
-                if (!firstPacket) {
-                    // We're waiting for the next frame, but this one is a fragment of a frame
-                    // so we must discard it and wait for the next one
-                    LimeLog.warning("Expected start of frame "+frameIndex);
-
-                    nextFrameNumber = frameIndex + 1;
-                    nextPacketNumber = 0;
-                    clearAvcFrameState();
-                    return;
-                }
-                else {
-                    gotNextFrameStart = true;
-                }
-            }
+            else {
+                // FEC data
+                return;
+            }
         }
+        // Check sequencing of this frame to ensure we didn't
+        // miss one in between
+        else if (firstPacket) {
+            // Make sure this is the next consecutive frame
+            if (nextFrameNumber < frameIndex) {
+                LimeLog.warning("Network dropped an entire frame");
+                nextFrameNumber = frameIndex + 1;
+
+                // Decode this one and hope for the best
+                waitingForNextSuccessfulFrame = true;
+                clearAvcFrameState();
+            }
+            else if (nextFrameNumber > frameIndex){
+                // Duplicate packet or FEC dup
+                return;
+            }
+            else {
+                // This will be the next expected frame
+                nextFrameNumber = frameIndex + 1;
+            }
+
+            // We're now decoding a frame
+            decodingFrame = true;
+        }
 
         int streamPacketIndex = packet.getStreamPacketIndex();
         if (streamPacketIndex != (int)(lastPacketInStream + 1)) {
@@ -309,35 +284,23 @@ public class VideoDepacketizer {
         }
         lastPacketInStream = streamPacketIndex;
 
-        nextPacketNumber++;
-
-        // Remove extra padding
-        cachedReassemblyDesc.length = packet.getPayloadLength();
-
-        if (firstPacket)
-        {
-            if (NAL.getSpecialSequenceDescriptor(cachedReassemblyDesc, cachedSpecialDesc)
+        if (firstPacket
+            && NAL.getSpecialSequenceDescriptor(cachedReassemblyDesc, cachedSpecialDesc)
             && NAL.isAvcFrameStart(cachedSpecialDesc)
             && cachedSpecialDesc.data[cachedSpecialDesc.offset+cachedSpecialDesc.length] == 0x67)
-            {
-                // SPS and PPS prefix is padded between NALs, so we must decode it with the slow path
-                clearAvcFrameState();
-                addInputDataSlow(packet, cachedReassemblyDesc);
-                return;
-            }
+        {
+            // SPS and PPS prefix is padded between NALs, so we must decode it with the slow path
+            addInputDataSlow(packet, cachedReassemblyDesc);
         }
-
-        addInputDataFast(packet, cachedReassemblyDesc, firstPacket);
-
-        // We can't use the EOF flag here because real frames can be split across
-        // multiple "frames" when packetized to fit under the bandwidth ceiling
-        if (packetIndex + 1 >= packetsInFrame) {
-            nextFrameNumber++;
-            nextPacketNumber = 0;
+        else
+        {
+            // Everything else can take the fast path
+            addInputDataFast(packet, cachedReassemblyDesc, firstPacket);
        }
 
-        if ((packet.getFlags() & VideoPacket.FLAG_EOF) != 0) {
+        if ((flags & VideoPacket.FLAG_EOF) != 0) {
             reassembleAvcFrame(packet.getFrameIndex());
+            decodingFrame = false;
 
             if (waitingForNextSuccessfulFrame) {
                 // This is the next successful frame after a loss event
diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoPacket.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoPacket.java
index cc141493..c25192a6 100644
--- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoPacket.java
+++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoPacket.java
@@ -13,16 +13,14 @@ public class VideoPacket {
     private int dataOffset;
 
     private int frameIndex;
-    private int packetIndex;
-    private int totalPackets;
-    private int payloadLength;
     private int flags;
     private int streamPacketIndex;
 
+    public static final int FLAG_CONTAINS_PIC_DATA = 0x1;
     public static final int FLAG_EOF = 0x2;
     public static final int FLAG_SOF = 0x4;
 
-    public static final int HEADER_SIZE = 56;
+    public static final int HEADER_SIZE = 16;
 
     public VideoPacket(byte[] buffer)
     {
@@ -36,12 +34,9 @@ public class VideoPacket {
         byteBuffer.rewind();
 
         // Read the video header fields
+        streamPacketIndex = (byteBuffer.getInt() >> 8) & 0xFFFFFF;
         frameIndex = byteBuffer.getInt();
-        packetIndex = byteBuffer.getInt();
-        totalPackets = byteBuffer.getInt();
-        flags = byteBuffer.getInt();
-        payloadLength = byteBuffer.getInt();
-        streamPacketIndex = byteBuffer.getInt();
+        flags = byteBuffer.getInt() & 0xFF;
 
         // Data offset without the RTP header
         dataOffset = HEADER_SIZE;
@@ -53,18 +48,15 @@ public class VideoPacket {
     public void initializeWithLength(int length)
     {
         // Skip the RTP header
-        byteBuffer.position(RtpPacket.HEADER_SIZE);
+        byteBuffer.position(RtpPacket.MAX_HEADER_SIZE);
 
         // Read the video header fields
+        streamPacketIndex = (byteBuffer.getInt() >> 8) & 0xFFFFFF;
         frameIndex = byteBuffer.getInt();
-        packetIndex = byteBuffer.getInt();
-        totalPackets = byteBuffer.getInt();
-        flags = byteBuffer.getInt();
-        payloadLength = byteBuffer.getInt();
-        streamPacketIndex = byteBuffer.getInt();
+        flags = byteBuffer.getInt() & 0xFF;
 
         // Data offset includes the RTP header
-        dataOffset = RtpPacket.HEADER_SIZE + HEADER_SIZE;
+        dataOffset = RtpPacket.MAX_HEADER_SIZE + HEADER_SIZE;
 
         // Update descriptor length
         buffer.length = length;
@@ -80,21 +72,6 @@ public class VideoPacket {
         return frameIndex;
     }
 
-    public int getPacketIndex()
-    {
-        return packetIndex;
-    }
-
-    public int getPayloadLength()
-    {
-        return payloadLength;
-    }
-
-    public int getTotalPackets()
-    {
-        return totalPackets;
-    }
-
     public int getStreamPacketIndex()
     {
         return streamPacketIndex;
diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java
index 62e1cdc7..b9567696 100644
--- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java
+++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java
@@ -194,7 +194,7 @@ public class VideoStream {
         int ringIndex = 0;
 
         // Preinitialize the ring buffer
-        int requiredBufferSize = streamConfig.getMaxPacketSize() + RtpPacket.HEADER_SIZE;
+        int requiredBufferSize = streamConfig.getMaxPacketSize() + RtpPacket.MAX_HEADER_SIZE;
         for (int i = 0; i < VIDEO_RING_SIZE; i++) {
             ring[i] = new VideoPacket(new byte[requiredBufferSize]);
         }
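Not part of the patch: the sketch below restates, in isolation, the two small pieces of protocol logic introduced above, namely the RTP extension-header sizing from RtpPacket.initializeWithLength() and the start-of-frame test from VideoDepacketizer.isFirstPacket(). The wrapper class and main() are illustrative assumptions; the constants and the checks themselves are copied from the diff.

// Illustrative sketch only; not part of the patch.
public class Gfe211PacketLogicSketch {
    // Constants copied from RtpPacket and VideoPacket above
    public static final int FLAG_EXTENSION = 0x10;
    public static final int FIXED_HEADER_SIZE = 12;

    public static final int FLAG_CONTAINS_PIC_DATA = 0x1;
    public static final int FLAG_EOF = 0x2;
    public static final int FLAG_SOF = 0x4;

    // Mirrors RtpPacket.initializeWithLength(): if the extension bit is set in
    // the first RTP byte, the header grows by 4 bytes (2 additional fields).
    static int rtpHeaderSize(byte firstRtpByte) {
        int headerSize = FIXED_HEADER_SIZE;
        if ((firstRtpByte & FLAG_EXTENSION) != 0) {
            headerSize += 4;
        }
        return headerSize;
    }

    // Mirrors VideoDepacketizer.isFirstPacket(): clear the picture-data bit,
    // then accept SOF alone or SOF|EOF (a frame that starts and ends in one packet).
    static boolean isFirstPacket(int flags) {
        flags &= ~FLAG_CONTAINS_PIC_DATA;
        return flags == (FLAG_SOF | FLAG_EOF) || flags == FLAG_SOF;
    }

    public static void main(String[] args) {
        System.out.println(rtpHeaderSize((byte) 0x90)); // extension bit set -> 16
        System.out.println(rtpHeaderSize((byte) 0x80)); // no extension -> 12
        System.out.println(isFirstPacket(FLAG_SOF | FLAG_CONTAINS_PIC_DATA)); // true
        System.out.println(isFirstPacket(FLAG_EOF | FLAG_CONTAINS_PIC_DATA)); // false
    }
}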