From ae18e00b13903469b81e0e6be825a996aafc06cd Mon Sep 17 00:00:00 2001 From: Iwan Timmer Date: Mon, 3 Feb 2014 00:30:44 +0100 Subject: [PATCH 01/17] Reassemble NAL's early --- .../com/limelight/nvstream/av/video/VideoDepacketizer.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index 61d8a1d2..afb51f09 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -193,6 +193,10 @@ public class VideoDepacketizer { // Add a buffer descriptor describing the NAL data in this packet avcNalDataChain.add(data); avcNalDataLength += location.offset-start; + + // Reassemble the NALs if this was the last packet for this frame + if (packet.getPacketIndex() + 1 == packet.getTotalPackets()) + reassembleAvcNal(); } } } From 1a38cc2c0ceaae32a428d445a48a3ed36376cd3c Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Tue, 4 Feb 2014 09:10:18 -0500 Subject: [PATCH 02/17] Update config based on Shield OTA 68 --- moonlight-common/src/com/limelight/nvstream/control/Config.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/moonlight-common/src/com/limelight/nvstream/control/Config.java b/moonlight-common/src/com/limelight/nvstream/control/Config.java index 476d8650..b7e090bd 100644 --- a/moonlight-common/src/com/limelight/nvstream/control/Config.java +++ b/moonlight-common/src/com/limelight/nvstream/control/Config.java @@ -48,7 +48,7 @@ public class Config { 436207616, 855638290, 266779, - 7000, + 10000, 266780, 2000, 266781, From 0cce5b021e3ab04614dbcea312ec36482a5316d1 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Mon, 17 Feb 2014 13:39:18 -0500 Subject: [PATCH 03/17] New video depacketizer that runs in O(1) time --- .../nvstream/av/video/VideoDepacketizer.java | 128 +++++++++--------- 1 file changed, 64 insertions(+), 64 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index afb51f09..25014dce 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -14,6 +14,7 @@ public class VideoDepacketizer { private LinkedList avcNalDataChain = null; private int avcNalDataLength = 0; private int currentlyDecoding = DecodeUnit.TYPE_UNKNOWN; + private boolean splitFrame = false; // Sequencing state private short lastSequenceNumber; @@ -55,67 +56,9 @@ public class VideoDepacketizer { } } - /* Currently unused pending bugfixes */ - public void addInputDataO1(VideoPacket packet) + /* Legacy depacketizer */ + public void addInputDataSlow(VideoPacket packet, ByteBufferDescriptor location) { - ByteBufferDescriptor location = packet.getNewPayloadDescriptor(); - - // SPS and PPS packet doesn't have standard headers, so submit it as is - if (location.length < 968) { - avcNalDataChain = new LinkedList(); - avcNalDataLength = 0; - - avcNalDataChain.add(location); - avcNalDataLength += location.length; - - reassembleAvcNal(); - } - else { - int packetIndex = packet.getPacketIndex(); - int packetsInFrame = packet.getTotalPackets(); - - // Check if this is the first packet for a frame - if (packetIndex == 0) { - // Setup state for the new frame - avcNalDataChain = new LinkedList(); - 
avcNalDataLength = 0; - } - - // Check if this packet falls in the range of packets in frame - if (packetIndex >= packetsInFrame) { - // This isn't H264 frame data - return; - } - - // Adjust the length to only contain valid data - location.length = packet.getPayloadLength(); - - // Add the payload data to the chain - if (avcNalDataChain != null) { - avcNalDataChain.add(location); - avcNalDataLength += location.length; - } - - // Reassemble the NALs if this was the last packet for this frame - if (packetIndex + 1 == packetsInFrame) { - reassembleAvcNal(); - } - } - } - - public void addInputData(VideoPacket packet) - { - ByteBufferDescriptor location = packet.getNewPayloadDescriptor(); - - if (location.length == 968) { - if (packet.getPacketIndex() < packet.getTotalPackets()) { - location.length = packet.getPayloadLength(); - } - else { - return; - } - } - while (location.length != 0) { // Remember the start of the NAL data in this packet @@ -193,14 +136,71 @@ public class VideoDepacketizer { // Add a buffer descriptor describing the NAL data in this packet avcNalDataChain.add(data); avcNalDataLength += location.offset-start; - - // Reassemble the NALs if this was the last packet for this frame - if (packet.getPacketIndex() + 1 == packet.getTotalPackets()) - reassembleAvcNal(); } } } + + public void addInputDataFast(VideoPacket packet, ByteBufferDescriptor location, boolean firstPacket) + { + if (firstPacket) { + // Setup state for the new frame + avcNalDataChain = new LinkedList(); + avcNalDataLength = 0; + } + + // Add the payload data to the chain + if (avcNalDataChain != null) { + avcNalDataChain.add(location); + avcNalDataLength += location.length; + } + } + + public void addInputData(VideoPacket packet) + { + ByteBufferDescriptor location = packet.getNewPayloadDescriptor(); + + // Runt packets can go directly to the decoder + if (location.length < 968) + { + avcNalDataChain = new LinkedList(); + avcNalDataLength = 0; + avcNalDataChain.add(location); + avcNalDataLength += location.length; + + reassembleAvcNal(); + + return; + } + + int packetIndex = packet.getPacketIndex(); + int packetsInFrame = packet.getTotalPackets(); + + // Discard FEC data early + if (packetIndex >= packetsInFrame) { + return; + } + + // Remove extra padding + location.length = packet.getPayloadLength(); + + boolean firstPacket = !splitFrame && packetIndex == 0; + + // Reset split frame state on next frame start + if (packetIndex == 0) { + splitFrame = false; + } + + addInputDataFast(packet, location, firstPacket); + + if (!splitFrame && packetIndex + 1 == packetsInFrame) { + // Reassemble the frame if this was the last packet and it's not a split frame + if (packet.getPayloadLength() == 968) + splitFrame = true; + else + reassembleAvcNal(); + } + } public void addInputData(RtpPacket packet) { From 26809c4b6bb4e228f59839f467a23c3ae48cd9ea Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Mon, 17 Feb 2014 13:57:15 -0500 Subject: [PATCH 04/17] Decode parameter set NALs with the slow path so the SPS fixup hack still works --- .../nvstream/av/video/VideoDepacketizer.java | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index 25014dce..61feae5e 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -56,7 
+56,6 @@ public class VideoDepacketizer { } } - /* Legacy depacketizer */ public void addInputDataSlow(VideoPacket packet, ByteBufferDescriptor location) { while (location.length != 0) @@ -159,17 +158,9 @@ public class VideoDepacketizer { { ByteBufferDescriptor location = packet.getNewPayloadDescriptor(); - // Runt packets can go directly to the decoder - if (location.length < 968) - { - avcNalDataChain = new LinkedList(); - avcNalDataLength = 0; - - avcNalDataChain.add(location); - avcNalDataLength += location.length; - - reassembleAvcNal(); - + // Runt packets get decoded using the slow path + if (location.length < 968) { + addInputDataSlow(packet, location); return; } @@ -190,6 +181,17 @@ public class VideoDepacketizer { if (packetIndex == 0) { splitFrame = false; } + + if (firstPacket) + { + if (NAL.getSpecialSequenceDescriptor(location, cachedDesc) && NAL.isAvcFrameStart(cachedDesc) + && cachedDesc.data[cachedDesc.offset+cachedDesc.length] == 0x67) + { + // SPS and PPS prefix is padded between NALs, so we must decode it with the slow path + addInputDataSlow(packet, location); + return; + } + } addInputDataFast(packet, location, firstPacket); From c93812179f0fbad3859c96b1bb9ee8f466771c51 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Mon, 17 Feb 2014 13:47:43 -0500 Subject: [PATCH 05/17] Updated config code for artifact-free 1080p60 streaming --- .../nvstream/control/ByteConfigTuple.java | 17 ++ .../limelight/nvstream/control/Config.java | 273 ++++++++++-------- .../nvstream/control/ConfigTuple.java | 53 ++++ .../nvstream/control/IntConfigTuple.java | 23 ++ .../nvstream/control/ShortConfigTuple.java | 23 ++ 5 files changed, 274 insertions(+), 115 deletions(-) create mode 100644 moonlight-common/src/com/limelight/nvstream/control/ByteConfigTuple.java create mode 100644 moonlight-common/src/com/limelight/nvstream/control/ConfigTuple.java create mode 100644 moonlight-common/src/com/limelight/nvstream/control/IntConfigTuple.java create mode 100644 moonlight-common/src/com/limelight/nvstream/control/ShortConfigTuple.java diff --git a/moonlight-common/src/com/limelight/nvstream/control/ByteConfigTuple.java b/moonlight-common/src/com/limelight/nvstream/control/ByteConfigTuple.java new file mode 100644 index 00000000..78b6a253 --- /dev/null +++ b/moonlight-common/src/com/limelight/nvstream/control/ByteConfigTuple.java @@ -0,0 +1,17 @@ +package com.limelight.nvstream.control; + +public class ByteConfigTuple extends ConfigTuple { + public static final short PAYLOAD_LENGTH = 1; + + public byte payload; + + public ByteConfigTuple(short packetType, byte payload) { + super(packetType, PAYLOAD_LENGTH); + this.payload = payload; + } + + @Override + public byte[] payloadToWire() { + return new byte[] {payload}; + } +} diff --git a/moonlight-common/src/com/limelight/nvstream/control/Config.java b/moonlight-common/src/com/limelight/nvstream/control/Config.java index b7e090bd..bfd21b52 100644 --- a/moonlight-common/src/com/limelight/nvstream/control/Config.java +++ b/moonlight-common/src/com/limelight/nvstream/control/Config.java @@ -2,141 +2,184 @@ package com.limelight.nvstream.control; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.util.HashSet; import com.limelight.nvstream.StreamConfiguration; public class Config { - public static final int[] UNKNOWN_CONFIG = + public static final ConfigTuple[] CONFIG_720_60 = { - 70151, - 68291329, - 1280, - 68291584, - 1280, - 68291840, - 15360, - 68292096, - 25600, - 68292352, - 2048, - 68292608, - 1024, - 68289024, - 262144, - 17957632, 
- 302055424, - 134217729, - 16777490, - 70153, - 68293120, - 768000, - 17961216, - 303235072, - 335609857, - 838861842, - 352321536, - 1006634002, - 369098752, - 335545362, - 385875968, - 1042, - 402653184, - 134218770, - 419430400, - 167773202, - 436207616, - 855638290, - 266779, - 10000, - 266780, - 2000, - 266781, - 50, - 266782, - 3000, - 266783, - 2, - 266794, - 5000, - 266795, - 500, - 266784, - 75, - 266785, - 25, - 266786, - 10, - 266787, - 60, - 266788, - 30, - 266789, - 3, - 266790, - 1000, - 266791, - 5000, - 266792, - 5000, - 266793, - 5000, - 70190, - 68301063, - 10240, - 68301312, - 6400, - 68301568, - 768000, - 68299776, - 768, - 68300032, - 2560, - 68300544, - 0, - 34746368, - (int)0xFE000000 + new IntConfigTuple((short)0x1206, 1), + new ByteConfigTuple((short)0x1207, (byte)1), + new IntConfigTuple((short)0x120b, 7), + new IntConfigTuple((short)0x120c, 7), + new IntConfigTuple((short)0x120d, 60), + new IntConfigTuple((short)0x120e, 100), + new IntConfigTuple((short)0x120f, 5), + new IntConfigTuple((short)0x1210, 4), + new IntConfigTuple((short)0x1202, 1024), + new ByteConfigTuple((short)0x1203, (byte)0), + new ByteConfigTuple((short)0x1201, (byte)0), + new ByteConfigTuple((short)0x1234, (byte)0), + new ByteConfigTuple((short)0x1248, (byte)0), + new ByteConfigTuple((short)0x1208, (byte)1), + new ByteConfigTuple((short)0x1209, (byte)0), + new IntConfigTuple((short)0x1212, 3000), + new IntConfigTuple((short)0x1238, 10000), + new ByteConfigTuple((short)0x1211, (byte)0), + new ByteConfigTuple((short)0x1213, (byte)1), + new IntConfigTuple((short)0x1214, 50), + new IntConfigTuple((short)0x1215, 60), + new IntConfigTuple((short)0x1216, 20), + new IntConfigTuple((short)0x1217, 0), + new IntConfigTuple((short)0x1218, 8), + new IntConfigTuple((short)0x1219, 10), + new IntConfigTuple((short)0x121a, 311), + new IntConfigTuple((short)0x121b, 10000), + new IntConfigTuple((short)0x121c, 2000), + new IntConfigTuple((short)0x121d, 50), + new IntConfigTuple((short)0x121e, 3000), + new IntConfigTuple((short)0x121f, 2), + new IntConfigTuple((short)0x122a, 5000), + new IntConfigTuple((short)0x122b, 500), + new IntConfigTuple((short)0x1220, 75), + new IntConfigTuple((short)0x1221, 25), + new IntConfigTuple((short)0x1222, 10), + new IntConfigTuple((short)0x1223, 60), + new IntConfigTuple((short)0x1224, 30), + new IntConfigTuple((short)0x1225, 3), + new IntConfigTuple((short)0x1226, 1000), + new IntConfigTuple((short)0x1227, 5000), + new IntConfigTuple((short)0x1228, 5000), + new IntConfigTuple((short)0x124e, 110), + new IntConfigTuple((short)0x1237, 10), + new IntConfigTuple((short)0x1236, 6), + new IntConfigTuple((short)0x1235, 4), + new IntConfigTuple((short)0x1242, 20000), + new IntConfigTuple((short)0x1244, 100), + new IntConfigTuple((short)0x1245, 1000), + new IntConfigTuple((short)0x1246, 720), + new IntConfigTuple((short)0x1247, 480), + new IntConfigTuple((short)0x1229, 5000), + new ByteConfigTuple((short)0x122e, (byte)7), + new IntConfigTuple((short)0x1231, 40), + new IntConfigTuple((short)0x1232, 25), + new IntConfigTuple((short)0x1233, 3000), + new IntConfigTuple((short)0x122c, 3), + new IntConfigTuple((short)0x122d, 10), + new IntConfigTuple((short)0x123b, 12), + new IntConfigTuple((short)0x123c, 3), + new IntConfigTuple((short)0x1249, 0), + new IntConfigTuple((short)0x124a, 4000), + new IntConfigTuple((short)0x124b, 5000), + new IntConfigTuple((short)0x124c, 6000), + new IntConfigTuple((short)0x124d, 1000), + new IntConfigTuple((short)0x122f, 0), + new ShortConfigTuple((short)0x1230, 
(short)0), + new IntConfigTuple((short)0x1239, 0), + new IntConfigTuple((short)0x123a, 0), + new IntConfigTuple((short)0x123d, 96000), + new IntConfigTuple((short)0x123e, 5), + new IntConfigTuple((short)0x123f, 1), + new IntConfigTuple((short)0x1243, 100) }; - public static final int CONFIG_SIZE = ((8 + UNKNOWN_CONFIG.length) * 4) + 3; + public static final ConfigTuple[] CONFIG_1080_30_DIFF = + { + new IntConfigTuple((short)0x120b, 10), + new IntConfigTuple((short)0x120c, 10), + new IntConfigTuple((short)0x121c, 4000), + new IntConfigTuple((short)0x1245, 3000), + new IntConfigTuple((short)0x1246, 1280), + new IntConfigTuple((short)0x1247, 720), + new IntConfigTuple((short)0x124a, 5000), + new IntConfigTuple((short)0x124c, 7000), + }; + public static final ConfigTuple[] CONFIG_1080_60_DIFF = + { + new IntConfigTuple((short)0x120b, 30), + new IntConfigTuple((short)0x120c, 30), + new IntConfigTuple((short)0x120f, 4), + new IntConfigTuple((short)0x121b, 30000), + new IntConfigTuple((short)0x121c, 25000), + new IntConfigTuple((short)0x1245, 3000), + new IntConfigTuple((short)0x1246, 1280), + new IntConfigTuple((short)0x1247, 720), + new IntConfigTuple((short)0x124a, 5000), + new IntConfigTuple((short)0x124c, 7000), + }; + private StreamConfiguration streamConfig; public Config(StreamConfiguration streamConfig) { this.streamConfig = streamConfig; } + private void updateSetWithConfig(HashSet set, ConfigTuple[] config) + { + for (ConfigTuple tuple : config) + { + // Remove any existing tuple of this type + set.remove(tuple); + + set.add(tuple); + } + } + + private int getConfigOnWireSize(HashSet tupleSet) + { + int size = 0; + + for (ConfigTuple t : tupleSet) + { + size += ConfigTuple.HEADER_LENGTH + t.payloadLength; + } + + return size; + } + + private HashSet generateTupleSet() { + HashSet tupleSet = new HashSet(); + + // Start with the initial config for 720p60 + updateSetWithConfig(tupleSet, CONFIG_720_60); + + if (streamConfig.getWidth() >= 1920 && + streamConfig.getHeight() >= 1080) + { + if (streamConfig.getRefreshRate() >= 60) + { + // Update the initial set with the changed 1080p60 options + updateSetWithConfig(tupleSet, CONFIG_1080_60_DIFF); + } + else + { + // Update the initial set with the changed 1080p30 options + updateSetWithConfig(tupleSet, CONFIG_1080_30_DIFF); + } + } + + tupleSet.add(new IntConfigTuple((short)0x1204, streamConfig.getWidth())); + tupleSet.add(new IntConfigTuple((short)0x1205, streamConfig.getHeight())); + tupleSet.add(new IntConfigTuple((short)0x120A, streamConfig.getRefreshRate())); + + return tupleSet; + } + public byte[] toWire() { - ByteBuffer bb = ByteBuffer.allocate(CONFIG_SIZE).order(ByteOrder.LITTLE_ENDIAN); + HashSet tupleSet = generateTupleSet(); + ByteBuffer bb = ByteBuffer.allocate(getConfigOnWireSize(tupleSet) + 4).order(ByteOrder.LITTLE_ENDIAN); - // Width - bb.putShort((short) 0x1204); - bb.putShort((short) 0x0004); - bb.putInt(streamConfig.getWidth()); - - // Height - bb.putShort((short) 0x1205); - bb.putShort((short) 0x0004); - bb.putInt(streamConfig.getHeight()); - - // Unknown - bb.putShort((short) 0x1206); - bb.putShort((short) 0x0004); - bb.putInt(1); - - // Refresh rate - bb.putShort((short) 0x120A); - bb.putShort((short) 0x0004); - bb.putInt(streamConfig.getRefreshRate()); - - // The rest are hardcoded - for (int i : UNKNOWN_CONFIG) { - bb.putInt(i); + for (ConfigTuple t : tupleSet) + { + bb.put(t.toWire()); } // Config tail - bb.putShort((short) 0x0013); - bb.put((byte) 0x00); + bb.putShort((short) 0x13fe); + bb.putShort((short) 0x00); 
return bb.array(); } diff --git a/moonlight-common/src/com/limelight/nvstream/control/ConfigTuple.java b/moonlight-common/src/com/limelight/nvstream/control/ConfigTuple.java new file mode 100644 index 00000000..ffd4457a --- /dev/null +++ b/moonlight-common/src/com/limelight/nvstream/control/ConfigTuple.java @@ -0,0 +1,53 @@ +package com.limelight.nvstream.control; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +public abstract class ConfigTuple { + public short packetType; + public short payloadLength; + + public static final short HEADER_LENGTH = 4; + + public ConfigTuple(short packetType, short payloadLength) + { + this.packetType = packetType; + this.payloadLength = payloadLength; + } + + public abstract byte[] payloadToWire(); + + public byte[] toWire() + { + byte[] payload = payloadToWire(); + ByteBuffer bb = ByteBuffer.allocate(HEADER_LENGTH + (payload != null ? payload.length : 0)) + .order(ByteOrder.LITTLE_ENDIAN); + + bb.putShort(packetType); + bb.putShort(payloadLength); + + if (payload != null) { + bb.put(payload); + } + + return bb.array(); + } + + @Override + public int hashCode() + { + return packetType; + } + + @Override + public boolean equals(Object o) + { + // We only compare the packet types on purpose + if (o instanceof ConfigTuple) { + return ((ConfigTuple)o).packetType == packetType; + } + else { + return false; + } + } +} diff --git a/moonlight-common/src/com/limelight/nvstream/control/IntConfigTuple.java b/moonlight-common/src/com/limelight/nvstream/control/IntConfigTuple.java new file mode 100644 index 00000000..9fe43c14 --- /dev/null +++ b/moonlight-common/src/com/limelight/nvstream/control/IntConfigTuple.java @@ -0,0 +1,23 @@ +package com.limelight.nvstream.control; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +public class IntConfigTuple extends ConfigTuple { + + public static final short PAYLOAD_LENGTH = 4; + + public int payload; + + public IntConfigTuple(short packetType, int payload) { + super(packetType, PAYLOAD_LENGTH); + this.payload = payload; + } + + @Override + public byte[] payloadToWire() { + ByteBuffer bb = ByteBuffer.allocate(PAYLOAD_LENGTH).order(ByteOrder.LITTLE_ENDIAN); + bb.putInt(payload); + return bb.array(); + } +} diff --git a/moonlight-common/src/com/limelight/nvstream/control/ShortConfigTuple.java b/moonlight-common/src/com/limelight/nvstream/control/ShortConfigTuple.java new file mode 100644 index 00000000..7f684ca0 --- /dev/null +++ b/moonlight-common/src/com/limelight/nvstream/control/ShortConfigTuple.java @@ -0,0 +1,23 @@ +package com.limelight.nvstream.control; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +public class ShortConfigTuple extends ConfigTuple { + + public static final short PAYLOAD_LENGTH = 2; + + public short payload; + + public ShortConfigTuple(short packetType, short payload) { + super(packetType, PAYLOAD_LENGTH); + this.payload = payload; + } + + @Override + public byte[] payloadToWire() { + ByteBuffer bb = ByteBuffer.allocate(PAYLOAD_LENGTH).order(ByteOrder.LITTLE_ENDIAN); + bb.putShort(payload); + return bb.array(); + } +} \ No newline at end of file From a96de39b281e13acb2388eac70adba54de26775d Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Mon, 17 Feb 2014 15:17:20 -0500 Subject: [PATCH 06/17] Use packet flags to determine where frames end and begin instead of the packet index --- .../nvstream/av/video/VideoDepacketizer.java | 19 ++++--------------- .../nvstream/av/video/VideoPacket.java | 13 ++++++++++--- 2 files changed, 14 insertions(+), 18 deletions(-) diff 
--git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index 61feae5e..a102f8ec 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -14,7 +14,6 @@ public class VideoDepacketizer { private LinkedList avcNalDataChain = null; private int avcNalDataLength = 0; private int currentlyDecoding = DecodeUnit.TYPE_UNKNOWN; - private boolean splitFrame = false; // Sequencing state private short lastSequenceNumber; @@ -175,15 +174,9 @@ public class VideoDepacketizer { // Remove extra padding location.length = packet.getPayloadLength(); - boolean firstPacket = !splitFrame && packetIndex == 0; - - // Reset split frame state on next frame start - if (packetIndex == 0) { - splitFrame = false; - } - + boolean firstPacket = (packet.getFlags() & VideoPacket.FLAG_SOF) != 0; if (firstPacket) - { + { if (NAL.getSpecialSequenceDescriptor(location, cachedDesc) && NAL.isAvcFrameStart(cachedDesc) && cachedDesc.data[cachedDesc.offset+cachedDesc.length] == 0x67) { @@ -195,12 +188,8 @@ public class VideoDepacketizer { addInputDataFast(packet, location, firstPacket); - if (!splitFrame && packetIndex + 1 == packetsInFrame) { - // Reassemble the frame if this was the last packet and it's not a split frame - if (packet.getPayloadLength() == 968) - splitFrame = true; - else - reassembleAvcNal(); + if ((packet.getFlags() & VideoPacket.FLAG_EOF) != 0) { + reassembleAvcNal(); } } diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoPacket.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoPacket.java index a74da573..78b21c97 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoPacket.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoPacket.java @@ -12,6 +12,10 @@ public class VideoPacket { private int packetIndex; private int totalPackets; private int payloadLength; + private int flags; + + public static final int FLAG_EOF = 0x2; + public static final int FLAG_SOF = 0x4; public VideoPacket(ByteBufferDescriptor rtpPayload) { @@ -23,12 +27,15 @@ public class VideoPacket { frameIndex = bb.getInt(); packetIndex = bb.getInt(); totalPackets = bb.getInt(); - - bb.position(bb.position()+4); - + flags = bb.getInt(); payloadLength = bb.getInt(); } + public int getFlags() + { + return flags; + } + public int getFrameIndex() { return frameIndex; From 2d5083179c28c67f020b0b0e8dd156f72aa3710c Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Mon, 17 Feb 2014 16:14:03 -0500 Subject: [PATCH 07/17] Revert "Remove depacketizer thread" This reverts commit a2a4463c0b684fa54212fe497ac2a8931ebd8821. 
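The patch below reintroduces a bounded hand-off queue between each receive thread and its depacketizer, so the receive loop spends its time draining the socket and simply drops data when downstream processing falls behind. A minimal, self-contained sketch of that producer/consumer pattern follows; the class and method names are invented for illustration, and only the bounded LinkedBlockingQueue usage mirrors the diff.

    import java.util.concurrent.LinkedBlockingQueue;

    // Illustrative hand-off between a receive loop and a worker thread.
    // Only the bounded-queue idea mirrors the patch; the names are hypothetical.
    public class PacketPump {
        private final LinkedBlockingQueue<byte[]> packets =
                new LinkedBlockingQueue<byte[]>(100);

        // Called on the receive thread: offer() never blocks and returns
        // false when the queue is full, so the socket keeps being drained.
        public boolean enqueue(byte[] packet) {
            return packets.offer(packet);
        }

        // Body of the depacketizer thread: take() blocks until data arrives.
        public void runWorker() {
            while (!Thread.currentThread().isInterrupted()) {
                byte[] packet;
                try {
                    packet = packets.take();
                } catch (InterruptedException e) {
                    return;
                }
                // Depacketize/decode the packet here
            }
        }
    }

In the diff itself, a failed offer() leaves the receive buffer untouched, so the next datagram overwrites the packet that could not be queued; that is how excess data is dropped without ever blocking the receive thread.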
--- .../nvstream/av/audio/AudioStream.java | 45 ++++++++++++++++--- .../nvstream/av/video/VideoStream.java | 44 ++++++++++++++++-- 2 files changed, 79 insertions(+), 10 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java index d1617dbc..fb7604e1 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java +++ b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java @@ -7,6 +7,7 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketException; import java.util.LinkedList; +import java.util.concurrent.LinkedBlockingQueue; import com.limelight.nvstream.NvConnectionListener; import com.limelight.nvstream.av.ByteBufferDescriptor; @@ -18,6 +19,8 @@ public class AudioStream { public static final int RTP_RECV_BUFFER = 64 * 1024; + private LinkedBlockingQueue packets = new LinkedBlockingQueue(100); + private DatagramSocket rtp; private AudioDepacketizer depacketizer = new AudioDepacketizer(); @@ -74,6 +77,8 @@ public class AudioStream { startReceiveThread(); + startDepacketizerThread(); + startDecoderThread(); startUdpPingThread(); @@ -99,13 +104,39 @@ public class AudioStream { streamListener.streamInitialized(OpusDecoder.getChannelCount(), OpusDecoder.getSampleRate()); } + private void startDepacketizerThread() + { + // This thread lessens the work on the receive thread + // so it can spend more time waiting for data + Thread t = new Thread() { + @Override + public void run() { + while (!isInterrupted()) + { + RtpPacket packet; + + try { + packet = packets.take(); + } catch (InterruptedException e) { + connListener.connectionTerminated(e); + return; + } + + depacketizer.decodeInputData(packet); + } + } + }; + threads.add(t); + t.setName("Audio - Depacketizer"); + t.start(); + } + private void startDecoderThread() { // Decoder thread Thread t = new Thread() { @Override public void run() { - while (!isInterrupted()) { ByteBufferDescriptor samples; @@ -118,7 +149,6 @@ public class AudioStream { } streamListener.playDecodedAudio(samples.data, samples.offset, samples.length); - } } }; @@ -140,14 +170,17 @@ public class AudioStream { { try { rtp.receive(packet); - desc.length = packet.getLength(); - depacketizer.decodeInputData(new RtpPacket(desc)); - desc.reinitialize(new byte[1500], 0, 1500); - packet.setData(desc.data, desc.offset, desc.length); } catch (IOException e) { connListener.connectionTerminated(e); return; } + + // Give the packet to the depacketizer thread + desc.length = packet.getLength(); + if (packets.offer(new RtpPacket(desc))) { + desc.reinitialize(new byte[1500], 0, 1500); + packet.setData(desc.data, desc.offset, desc.length); + } } } }; diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java index e69fe31a..7414e0ef 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java @@ -9,6 +9,7 @@ import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketException; import java.util.LinkedList; +import java.util.concurrent.LinkedBlockingQueue; import com.limelight.nvstream.NvConnectionListener; import com.limelight.nvstream.StreamConfiguration; @@ -25,6 +26,8 @@ public class VideoStream { public static final int FIRST_FRAME_TIMEOUT = 5000; public static final int RTP_RECV_BUFFER = 128 * 
1024; + private LinkedBlockingQueue packets = new LinkedBlockingQueue(100); + private InetAddress host; private DatagramSocket rtp; private Socket firstFrameSocket; @@ -158,6 +161,9 @@ public class VideoStream { // early packets startReceiveThread(); + // Start the depacketizer thread to deal with the RTP data + startDepacketizerThread(); + // Start decoding the data we're receiving startDecoderThread(); @@ -194,6 +200,34 @@ public class VideoStream { t.start(); } + private void startDepacketizerThread() + { + // This thread lessens the work on the receive thread + // so it can spend more time waiting for data + Thread t = new Thread() { + @Override + public void run() { + while (!isInterrupted()) + { + RtpPacket packet; + + try { + packet = packets.take(); + } catch (InterruptedException e) { + listener.connectionTerminated(e); + return; + } + + // !!! We no longer own the data buffer at this point !!! + depacketizer.addInputData(packet); + } + } + }; + threads.add(t); + t.setName("Video - Depacketizer"); + t.start(); + } + private void startReceiveThread() { // Receive thread @@ -207,15 +241,17 @@ public class VideoStream { { try { rtp.receive(packet); - desc.length = packet.getLength(); - depacketizer.addInputData(new RtpPacket(desc)); - desc.reinitialize(new byte[1500], 0, 1500); - packet.setData(desc.data, desc.offset, desc.length); } catch (IOException e) { listener.connectionTerminated(e); return; } + // Give the packet to the depacketizer thread + desc.length = packet.getLength(); + if (packets.offer(new RtpPacket(desc))) { + desc.reinitialize(new byte[1500], 0, 1500); + packet.setData(desc.data, desc.offset, desc.length); + } } } }; From 29dd0e172cd4854180efe95cd8f0253dab8e9cb5 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Mon, 17 Feb 2014 19:18:34 -0500 Subject: [PATCH 08/17] Fix the new config tuples to work with our current control stream code --- .../limelight/nvstream/control/Config.java | 49 ++++++++++++------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/control/Config.java b/moonlight-common/src/com/limelight/nvstream/control/Config.java index bfd21b52..94717bf1 100644 --- a/moonlight-common/src/com/limelight/nvstream/control/Config.java +++ b/moonlight-common/src/com/limelight/nvstream/control/Config.java @@ -2,6 +2,7 @@ package com.limelight.nvstream.control; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.util.ArrayList; import java.util.HashSet; import com.limelight.nvstream.StreamConfiguration; @@ -10,7 +11,6 @@ public class Config { public static final ConfigTuple[] CONFIG_720_60 = { - new IntConfigTuple((short)0x1206, 1), new ByteConfigTuple((short)0x1207, (byte)1), new IntConfigTuple((short)0x120b, 7), new IntConfigTuple((short)0x120c, 7), @@ -68,13 +68,13 @@ public class Config { new IntConfigTuple((short)0x1233, 3000), new IntConfigTuple((short)0x122c, 3), new IntConfigTuple((short)0x122d, 10), - new IntConfigTuple((short)0x123b, 12), + /*new IntConfigTuple((short)0x123b, 12), new IntConfigTuple((short)0x123c, 3), new IntConfigTuple((short)0x1249, 0), new IntConfigTuple((short)0x124a, 4000), new IntConfigTuple((short)0x124b, 5000), new IntConfigTuple((short)0x124c, 6000), - new IntConfigTuple((short)0x124d, 1000), + new IntConfigTuple((short)0x124d, 1000),*/ new IntConfigTuple((short)0x122f, 0), new ShortConfigTuple((short)0x1230, (short)0), new IntConfigTuple((short)0x1239, 0), @@ -93,8 +93,8 @@ public class Config { new IntConfigTuple((short)0x1245, 3000), new 
IntConfigTuple((short)0x1246, 1280), new IntConfigTuple((short)0x1247, 720), - new IntConfigTuple((short)0x124a, 5000), - new IntConfigTuple((short)0x124c, 7000), + /*new IntConfigTuple((short)0x124a, 5000), + new IntConfigTuple((short)0x124c, 7000),*/ }; public static final ConfigTuple[] CONFIG_1080_60_DIFF = @@ -107,8 +107,8 @@ public class Config { new IntConfigTuple((short)0x1245, 3000), new IntConfigTuple((short)0x1246, 1280), new IntConfigTuple((short)0x1247, 720), - new IntConfigTuple((short)0x124a, 5000), - new IntConfigTuple((short)0x124c, 7000), + /*new IntConfigTuple((short)0x124a, 5000), + new IntConfigTuple((short)0x124c, 7000),*/ }; private StreamConfiguration streamConfig; @@ -117,18 +117,28 @@ public class Config { this.streamConfig = streamConfig; } - private void updateSetWithConfig(HashSet set, ConfigTuple[] config) + private void updateSetWithConfig(ArrayList set, ConfigTuple[] config) { for (ConfigTuple tuple : config) { - // Remove any existing tuple of this type - set.remove(tuple); + int i; - set.add(tuple); + for (i = 0; i < set.size(); i++) { + ConfigTuple existingTuple = set.get(i); + if (existingTuple.packetType == tuple.packetType) { + set.remove(i); + set.add(i, tuple); + break; + } + } + + if (i == set.size()) { + set.add(tuple); + } } } - private int getConfigOnWireSize(HashSet tupleSet) + private int getConfigOnWireSize(ArrayList tupleSet) { int size = 0; @@ -140,8 +150,13 @@ public class Config { return size; } - private HashSet generateTupleSet() { - HashSet tupleSet = new HashSet(); + private ArrayList generateTupleSet() { + ArrayList tupleSet = new ArrayList(); + + tupleSet.add(new IntConfigTuple((short)0x1204, streamConfig.getWidth())); + tupleSet.add(new IntConfigTuple((short)0x1205, streamConfig.getHeight())); + tupleSet.add(new IntConfigTuple((short)0x1206, 1)); + tupleSet.add(new IntConfigTuple((short)0x120A, streamConfig.getRefreshRate())); // Start with the initial config for 720p60 updateSetWithConfig(tupleSet, CONFIG_720_60); @@ -161,15 +176,11 @@ public class Config { } } - tupleSet.add(new IntConfigTuple((short)0x1204, streamConfig.getWidth())); - tupleSet.add(new IntConfigTuple((short)0x1205, streamConfig.getHeight())); - tupleSet.add(new IntConfigTuple((short)0x120A, streamConfig.getRefreshRate())); - return tupleSet; } public byte[] toWire() { - HashSet tupleSet = generateTupleSet(); + ArrayList tupleSet = generateTupleSet(); ByteBuffer bb = ByteBuffer.allocate(getConfigOnWireSize(tupleSet) + 4).order(ByteOrder.LITTLE_ENDIAN); for (ConfigTuple t : tupleSet) From 21116f90a77708828aaa1ea4d74ff0cc3fec387f Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Mon, 17 Feb 2014 19:22:49 -0500 Subject: [PATCH 09/17] Remove unused imports --- moonlight-common/src/com/limelight/nvstream/control/Config.java | 1 - 1 file changed, 1 deletion(-) diff --git a/moonlight-common/src/com/limelight/nvstream/control/Config.java b/moonlight-common/src/com/limelight/nvstream/control/Config.java index 94717bf1..ac906e5e 100644 --- a/moonlight-common/src/com/limelight/nvstream/control/Config.java +++ b/moonlight-common/src/com/limelight/nvstream/control/Config.java @@ -3,7 +3,6 @@ package com.limelight.nvstream.control; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; -import java.util.HashSet; import com.limelight.nvstream.StreamConfiguration; From cdf634dc4134ad8b335f7ec2d4d311f87c17be54 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Wed, 19 Feb 2014 19:03:51 -0500 Subject: [PATCH 10/17] Display messages if we detect that 
the device is having issues streaming --- .../nvstream/NvConnectionListener.java | 1 + .../nvstream/av/ConnectionStatusListener.java | 4 ++- .../nvstream/av/video/VideoDepacketizer.java | 6 ++-- .../nvstream/control/ControlStream.java | 36 ++++++++++++++++++- 4 files changed, 42 insertions(+), 5 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/NvConnectionListener.java b/moonlight-common/src/com/limelight/nvstream/NvConnectionListener.java index 0fb63dc2..f7b80f82 100644 --- a/moonlight-common/src/com/limelight/nvstream/NvConnectionListener.java +++ b/moonlight-common/src/com/limelight/nvstream/NvConnectionListener.java @@ -29,4 +29,5 @@ public interface NvConnectionListener { public void connectionTerminated(Exception e); public void displayMessage(String message); + public void displayTransientMessage(String message); } diff --git a/moonlight-common/src/com/limelight/nvstream/av/ConnectionStatusListener.java b/moonlight-common/src/com/limelight/nvstream/av/ConnectionStatusListener.java index 35262ddf..fe58cf63 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/ConnectionStatusListener.java +++ b/moonlight-common/src/com/limelight/nvstream/av/ConnectionStatusListener.java @@ -3,5 +3,7 @@ package com.limelight.nvstream.av; public interface ConnectionStatusListener { public void connectionTerminated(); - public void connectionNeedsResync(); + public void connectionDetectedPacketLoss(); + + public void connectionSinkTooSlow(); } diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index a102f8ec..60ea4852 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -23,7 +23,7 @@ public class VideoDepacketizer { private ConnectionStatusListener controlListener; - private static final int DU_LIMIT = 15; + private static final int DU_LIMIT = 7; private LinkedBlockingQueue decodedUnits = new LinkedBlockingQueue(DU_LIMIT); public VideoDepacketizer(ConnectionStatusListener controlListener) @@ -47,7 +47,7 @@ public class VideoDepacketizer { // We need a new IDR frame since we're discarding data now System.out.println("Video decoder is too slow! 
Forced to drop decode units"); decodedUnits.clear(); - controlListener.connectionNeedsResync(); + controlListener.connectionSinkTooSlow(); } // Clear old state @@ -208,7 +208,7 @@ public class VideoDepacketizer { clearAvcNalState(); // Request an IDR frame - controlListener.connectionNeedsResync(); + controlListener.connectionDetectedPacketLoss(); } lastSequenceNumber = seq; diff --git a/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java b/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java index 9d40ded5..cdb7e945 100644 --- a/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java +++ b/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java @@ -44,6 +44,15 @@ public class ControlStream implements ConnectionStatusListener { private InetAddress host; private Config config; + public static final int LOSS_PERIOD_MS = 5000; + public static final int MAX_LOSS_COUNT_IN_PERIOD = 5; + public static final int MAX_SLOW_SINK_COUNT = 3; + public static final int MESSAGE_DELAY_FACTOR = 5; + + private long lossTimestamp; + private int lossCount; + private int slowSinkCount; + private Socket s; private InputStream in; private OutputStream out; @@ -404,10 +413,35 @@ public class ControlStream implements ConnectionStatusListener { abort(); } - public void connectionNeedsResync() { + private void resyncConnection() { synchronized (resyncNeeded) { // Wake up the resync thread resyncNeeded.notify(); } } + + public void connectionDetectedPacketLoss() { + if (System.currentTimeMillis() > LOSS_PERIOD_MS + lossTimestamp) { + lossCount++; + lossTimestamp = System.currentTimeMillis(); + } + else { + if (++lossCount == MAX_LOSS_COUNT_IN_PERIOD) { + listener.displayTransientMessage("Detected excessive A/V data loss. Try improving your network connection or lowering stream settings."); + lossCount = -MAX_LOSS_COUNT_IN_PERIOD * MESSAGE_DELAY_FACTOR; + lossTimestamp = 0; + } + } + + resyncConnection(); + } + + public void connectionSinkTooSlow() { + if (++slowSinkCount == MAX_SLOW_SINK_COUNT) { + listener.displayTransientMessage("Your device is processing the A/V data too slowly. 
Try lowering stream settings."); + slowSinkCount = -MAX_SLOW_SINK_COUNT * MESSAGE_DELAY_FACTOR; + } + + resyncConnection(); + } } From cf3ac50d22341026658718dad2c36d2ac3b9efb4 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Wed, 19 Feb 2014 20:36:12 -0500 Subject: [PATCH 11/17] Increase the propensity for generating a loss warning --- .../src/com/limelight/nvstream/control/ControlStream.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java b/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java index cdb7e945..ea473f36 100644 --- a/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java +++ b/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java @@ -44,8 +44,8 @@ public class ControlStream implements ConnectionStatusListener { private InetAddress host; private Config config; - public static final int LOSS_PERIOD_MS = 5000; - public static final int MAX_LOSS_COUNT_IN_PERIOD = 5; + public static final int LOSS_PERIOD_MS = 15000; + public static final int MAX_LOSS_COUNT_IN_PERIOD = 3; public static final int MAX_SLOW_SINK_COUNT = 3; public static final int MESSAGE_DELAY_FACTOR = 5; From 63ee6ef79a657547d2f4d83746e9a00080ea2af4 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Wed, 19 Feb 2014 20:36:53 -0500 Subject: [PATCH 12/17] Add support for direct submission of buffers to the renderers without a separate thread --- .../nvstream/av/audio/AudioDepacketizer.java | 15 ++++++++++++--- .../nvstream/av/audio/AudioRenderer.java | 5 +++++ .../limelight/nvstream/av/audio/AudioStream.java | 13 +++++++++++-- .../nvstream/av/video/VideoDecoderRenderer.java | 5 +++++ .../nvstream/av/video/VideoDepacketizer.java | 12 +++++++++--- .../limelight/nvstream/av/video/VideoStream.java | 16 +++++++++++++--- 6 files changed, 55 insertions(+), 11 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioDepacketizer.java index bfff091f..f7dd583d 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioDepacketizer.java @@ -10,9 +10,16 @@ public class AudioDepacketizer { private static final int DU_LIMIT = 15; private LinkedBlockingQueue decodedUnits = new LinkedBlockingQueue(DU_LIMIT); - + + private AudioRenderer directSubmitRenderer; + // Sequencing state private short lastSequenceNumber; + + public AudioDepacketizer(AudioRenderer directSubmitRenderer) + { + this.directSubmitRenderer = directSubmitRenderer; + } private void decodeData(byte[] data, int off, int len) { @@ -24,8 +31,10 @@ public class AudioDepacketizer { // Return value of decode is frames (shorts) decoded per channel decodeLen *= 2*OpusDecoder.getChannelCount(); - // Put it on the decoded queue - if (!decodedUnits.offer(new ByteBufferDescriptor(pcmData, 0, decodeLen))) { + if (directSubmitRenderer != null) { + directSubmitRenderer.playDecodedAudio(pcmData, 0, decodeLen); + } + else if (!decodedUnits.offer(new ByteBufferDescriptor(pcmData, 0, decodeLen))) { System.out.println("Audio player too slow! 
Forced to drop decoded samples"); // Clear out the queue decodedUnits.clear(); diff --git a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioRenderer.java b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioRenderer.java index a7125998..48fb88af 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioRenderer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioRenderer.java @@ -1,6 +1,11 @@ package com.limelight.nvstream.av.audio; public interface AudioRenderer { + // playDecodedAudio() is lightweight, so don't use an extra thread for playback + public static final int CAPABILITY_DIRECT_SUBMIT = 0x1; + + public int getCapabilities(); + public void streamInitialized(int channelCount, int sampleRate); public void playDecodedAudio(byte[] audioData, int offset, int length); diff --git a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java index fb7604e1..3ae8c5e6 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java +++ b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java @@ -23,7 +23,7 @@ public class AudioStream { private DatagramSocket rtp; - private AudioDepacketizer depacketizer = new AudioDepacketizer(); + private AudioDepacketizer depacketizer; private LinkedList threads = new LinkedList(); @@ -79,7 +79,9 @@ public class AudioStream { startDepacketizerThread(); - startDecoderThread(); + if ((streamListener.getCapabilities() & AudioRenderer.CAPABILITY_DIRECT_SUBMIT) == 0) { + startDecoderThread(); + } startUdpPingThread(); } @@ -102,6 +104,13 @@ public class AudioStream { } streamListener.streamInitialized(OpusDecoder.getChannelCount(), OpusDecoder.getSampleRate()); + + if ((streamListener.getCapabilities() & AudioRenderer.CAPABILITY_DIRECT_SUBMIT) != 0) { + depacketizer = new AudioDepacketizer(streamListener); + } + else { + depacketizer = new AudioDepacketizer(null); + } } private void startDepacketizerThread() diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDecoderRenderer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDecoderRenderer.java index 7fe0be00..335c53d2 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDecoderRenderer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDecoderRenderer.java @@ -6,6 +6,11 @@ public interface VideoDecoderRenderer { public static final int FLAG_PREFER_QUALITY = 0x1; public static final int FLAG_FORCE_HARDWARE_DECODING = 0x2; public static final int FLAG_FORCE_SOFTWARE_DECODING = 0x4; + + // SubmitDecodeUnit() is lightweight, so don't use an extra thread for decoding + public static final int CAPABILITY_DIRECT_SUBMIT = 0x1; + + public int getCapabilities(); public void setup(int width, int height, int redrawRate, Object renderTarget, int drFlags); diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index 60ea4852..4215ae6b 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -22,12 +22,14 @@ public class VideoDepacketizer { private ByteBufferDescriptor cachedDesc = new ByteBufferDescriptor(null, 0, 0); private ConnectionStatusListener controlListener; + private VideoDecoderRenderer directSubmitDr; - private static final int DU_LIMIT = 7; + private static 
final int DU_LIMIT = 15; private LinkedBlockingQueue decodedUnits = new LinkedBlockingQueue(DU_LIMIT); - public VideoDepacketizer(ConnectionStatusListener controlListener) + public VideoDepacketizer(VideoDecoderRenderer directSubmitDr, ConnectionStatusListener controlListener) { + this.directSubmitDr = directSubmitDr; this.controlListener = controlListener; } @@ -43,7 +45,11 @@ public class VideoDepacketizer { if (avcNalDataChain != null && avcNalDataLength != 0) { // Construct the H264 decode unit DecodeUnit du = new DecodeUnit(DecodeUnit.TYPE_H264, avcNalDataChain, avcNalDataLength, 0); - if (!decodedUnits.offer(du)) { + if (directSubmitDr != null) { + // Submit directly to the decoder + directSubmitDr.submitDecodeUnit(du); + } + else if (!decodedUnits.offer(du)) { // We need a new IDR frame since we're discarding data now System.out.println("Video decoder is too slow! Forced to drop decode units"); decodedUnits.clear(); diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java index 7414e0ef..b197f64d 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java @@ -35,6 +35,7 @@ public class VideoStream { private LinkedList threads = new LinkedList(); private NvConnectionListener listener; + private ConnectionStatusListener avConnListener; private VideoDepacketizer depacketizer; private StreamConfiguration streamConfig; @@ -47,7 +48,7 @@ public class VideoStream { { this.host = host; this.listener = listener; - this.depacketizer = new VideoDepacketizer(avConnListener); + this.avConnListener = avConnListener; this.streamConfig = streamConfig; } @@ -134,6 +135,13 @@ public class VideoStream { if (decRend != null) { decRend.setup(streamConfig.getWidth(), streamConfig.getHeight(), 60, renderTarget, drFlags); + + if ((decRend.getCapabilities() & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) != 0) { + depacketizer = new VideoDepacketizer(decRend, avConnListener); + } + else { + depacketizer = new VideoDepacketizer(null, avConnListener); + } } } @@ -164,8 +172,10 @@ public class VideoStream { // Start the depacketizer thread to deal with the RTP data startDepacketizerThread(); - // Start decoding the data we're receiving - startDecoderThread(); + // Start a decode thread if we're not doing direct submit + if ((decRend.getCapabilities() & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) == 0) { + startDecoderThread(); + } // Start the renderer decRend.start(); From ccc3eeebe85b3eec12e03a3bdba371ce76165332 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Wed, 19 Feb 2014 20:59:31 -0500 Subject: [PATCH 13/17] Remove the depacketizer thread again... 
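Patches 12 and 13 together replace the always-on decoder thread with a capability flag: a renderer that reports CAPABILITY_DIRECT_SUBMIT is fed straight from the receive path, and no extra thread is started for it. A self-contained sketch of that dispatch follows; the interface and class names here are hypothetical, and only the bitmask check mirrors the diffs.

    // Illustration of the capability-flag dispatch used by AudioStream and VideoStream.
    // These names are invented for the example.
    interface Renderer {
        int CAPABILITY_DIRECT_SUBMIT = 0x1;
        int getCapabilities();
        void submit(byte[] data);
    }

    class StreamSetup {
        void start(final Renderer renderer) {
            if ((renderer.getCapabilities() & Renderer.CAPABILITY_DIRECT_SUBMIT) != 0) {
                // Lightweight renderer: the depacketizer calls submit() directly
                // from the receive path, so no decoder thread is created
                return;
            }

            // Heavyweight renderer: a dedicated decoder thread drains the
            // depacketizer's queue and calls submit() with each decoded unit
            Thread t = new Thread() {
                @Override
                public void run() {
                    // take() units from a queue and pass them to the renderer
                }
            };
            t.setName("Decoder");
            t.start();
        }
    }

A renderer that advertises the flag takes on the obligation that its submit path is cheap; anything slow there would stall packet reception, which is why the threaded path remains the default when the flag is absent.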
--- .../nvstream/av/audio/AudioStream.java | 45 +++---------------- .../nvstream/av/video/VideoStream.java | 44 ++---------------- 2 files changed, 10 insertions(+), 79 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java index 3ae8c5e6..11567487 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java +++ b/moonlight-common/src/com/limelight/nvstream/av/audio/AudioStream.java @@ -7,7 +7,6 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketException; import java.util.LinkedList; -import java.util.concurrent.LinkedBlockingQueue; import com.limelight.nvstream.NvConnectionListener; import com.limelight.nvstream.av.ByteBufferDescriptor; @@ -19,8 +18,6 @@ public class AudioStream { public static final int RTP_RECV_BUFFER = 64 * 1024; - private LinkedBlockingQueue packets = new LinkedBlockingQueue(100); - private DatagramSocket rtp; private AudioDepacketizer depacketizer; @@ -77,8 +74,6 @@ public class AudioStream { startReceiveThread(); - startDepacketizerThread(); - if ((streamListener.getCapabilities() & AudioRenderer.CAPABILITY_DIRECT_SUBMIT) == 0) { startDecoderThread(); } @@ -113,39 +108,13 @@ public class AudioStream { } } - private void startDepacketizerThread() - { - // This thread lessens the work on the receive thread - // so it can spend more time waiting for data - Thread t = new Thread() { - @Override - public void run() { - while (!isInterrupted()) - { - RtpPacket packet; - - try { - packet = packets.take(); - } catch (InterruptedException e) { - connListener.connectionTerminated(e); - return; - } - - depacketizer.decodeInputData(packet); - } - } - }; - threads.add(t); - t.setName("Audio - Depacketizer"); - t.start(); - } - private void startDecoderThread() { // Decoder thread Thread t = new Thread() { @Override public void run() { + while (!isInterrupted()) { ByteBufferDescriptor samples; @@ -158,6 +127,7 @@ public class AudioStream { } streamListener.playDecodedAudio(samples.data, samples.offset, samples.length); + } } }; @@ -179,17 +149,14 @@ public class AudioStream { { try { rtp.receive(packet); + desc.length = packet.getLength(); + depacketizer.decodeInputData(new RtpPacket(desc)); + desc.reinitialize(new byte[1500], 0, 1500); + packet.setData(desc.data, desc.offset, desc.length); } catch (IOException e) { connListener.connectionTerminated(e); return; } - - // Give the packet to the depacketizer thread - desc.length = packet.getLength(); - if (packets.offer(new RtpPacket(desc))) { - desc.reinitialize(new byte[1500], 0, 1500); - packet.setData(desc.data, desc.offset, desc.length); - } } } }; diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java index b197f64d..cf2b91ca 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoStream.java @@ -9,7 +9,6 @@ import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketException; import java.util.LinkedList; -import java.util.concurrent.LinkedBlockingQueue; import com.limelight.nvstream.NvConnectionListener; import com.limelight.nvstream.StreamConfiguration; @@ -26,8 +25,6 @@ public class VideoStream { public static final int FIRST_FRAME_TIMEOUT = 5000; public static final int RTP_RECV_BUFFER = 128 * 1024; - private LinkedBlockingQueue packets = new 
LinkedBlockingQueue(100); - private InetAddress host; private DatagramSocket rtp; private Socket firstFrameSocket; @@ -169,9 +166,6 @@ public class VideoStream { // early packets startReceiveThread(); - // Start the depacketizer thread to deal with the RTP data - startDepacketizerThread(); - // Start a decode thread if we're not doing direct submit if ((decRend.getCapabilities() & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) == 0) { startDecoderThread(); @@ -210,34 +204,6 @@ public class VideoStream { t.start(); } - private void startDepacketizerThread() - { - // This thread lessens the work on the receive thread - // so it can spend more time waiting for data - Thread t = new Thread() { - @Override - public void run() { - while (!isInterrupted()) - { - RtpPacket packet; - - try { - packet = packets.take(); - } catch (InterruptedException e) { - listener.connectionTerminated(e); - return; - } - - // !!! We no longer own the data buffer at this point !!! - depacketizer.addInputData(packet); - } - } - }; - threads.add(t); - t.setName("Video - Depacketizer"); - t.start(); - } - private void startReceiveThread() { // Receive thread @@ -251,17 +217,15 @@ public class VideoStream { { try { rtp.receive(packet); + desc.length = packet.getLength(); + depacketizer.addInputData(new RtpPacket(desc)); + desc.reinitialize(new byte[1500], 0, 1500); + packet.setData(desc.data, desc.offset, desc.length); } catch (IOException e) { listener.connectionTerminated(e); return; } - // Give the packet to the depacketizer thread - desc.length = packet.getLength(); - if (packets.offer(new RtpPacket(desc))) { - desc.reinitialize(new byte[1500], 0, 1500); - packet.setData(desc.data, desc.offset, desc.length); - } } } }; From bc2ca0b38617225eb02fcd5fa71b6cef5315f1c8 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Mon, 24 Feb 2014 12:54:03 -0500 Subject: [PATCH 14/17] Increase resilience to packet loss. IDR frames are no longer requested if error correction data was lost. A maximum of one IDR frame is requested per corrupt frame. Error correction data is used to recover from the loss of a single-packet frame. 
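For a single-packet frame, the forward error correction data is simply a duplicate of the original packet, which is what makes the recovery described above possible: if packet 0 of the expected frame never arrived, the FEC copy (packet index 1 of a one-packet frame) can be accepted in its place rather than being discarded and triggering an IDR request. The check in the hunk below reduces to roughly the following condensed sketch; the field names come from the diff, but this is not the complete patched method.

    // Condensed sketch of the single-packet FEC recovery path in the diff below
    if (packetsInFrame == 1 && packetIndex == 1 &&
            nextPacketNumber == 0 && frameIndex == nextFrameNumber) {
        // The FEC packet duplicates the lost original, so accept it as the frame data
        System.out.println("Using FEC for error correction");
        nextPacketNumber = 1;
    }
    else if (packetIndex >= packetsInFrame) {
        // All other FEC data is discarded early without requesting an IDR frame
        return;
    }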
--- .../nvstream/av/ConnectionStatusListener.java | 4 +- .../com/limelight/nvstream/av/DecodeUnit.java | 9 +- .../nvstream/av/video/VideoDepacketizer.java | 148 ++++++++++++------ .../nvstream/control/ControlStream.java | 56 ++++--- 4 files changed, 142 insertions(+), 75 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/av/ConnectionStatusListener.java b/moonlight-common/src/com/limelight/nvstream/av/ConnectionStatusListener.java index fe58cf63..2111f56b 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/ConnectionStatusListener.java +++ b/moonlight-common/src/com/limelight/nvstream/av/ConnectionStatusListener.java @@ -3,7 +3,7 @@ package com.limelight.nvstream.av; public interface ConnectionStatusListener { public void connectionTerminated(); - public void connectionDetectedPacketLoss(); + public void connectionDetectedFrameLoss(int firstLostFrame, int lastLostFrame); - public void connectionSinkTooSlow(); + public void connectionSinkTooSlow(int firstLostFrame, int lastLostFrame); } diff --git a/moonlight-common/src/com/limelight/nvstream/av/DecodeUnit.java b/moonlight-common/src/com/limelight/nvstream/av/DecodeUnit.java index f58510e0..369de8da 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/DecodeUnit.java +++ b/moonlight-common/src/com/limelight/nvstream/av/DecodeUnit.java @@ -11,13 +11,15 @@ public class DecodeUnit { private List bufferList; private int dataLength; private int flags; + private int frameNumber; - public DecodeUnit(int type, List bufferList, int dataLength, int flags) + public DecodeUnit(int type, List bufferList, int dataLength, int flags, int frameNumber) { this.type = type; this.bufferList = bufferList; this.dataLength = dataLength; this.flags = flags; + this.frameNumber = frameNumber; } public int getType() @@ -39,4 +41,9 @@ public class DecodeUnit { { return dataLength; } + + public int getFrameNumber() + { + return frameNumber; + } } diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index 4215ae6b..7a8a867a 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -10,13 +10,16 @@ import com.limelight.nvstream.av.ConnectionStatusListener; public class VideoDepacketizer { - // Current NAL state - private LinkedList avcNalDataChain = null; - private int avcNalDataLength = 0; + // Current frame state + private LinkedList avcFrameDataChain = null; + private int avcFrameDataLength = 0; private int currentlyDecoding = DecodeUnit.TYPE_UNKNOWN; // Sequencing state - private short lastSequenceNumber; + private int nextFrameNumber = 1; + private int nextPacketNumber; + private int startFrameNumber = 1; + private boolean waitingForFrameStart; // Cached objects private ByteBufferDescriptor cachedDesc = new ByteBufferDescriptor(null, 0, 0); @@ -33,31 +36,31 @@ public class VideoDepacketizer { this.controlListener = controlListener; } - private void clearAvcNalState() + private void clearAvcFrameState() { - avcNalDataChain = null; - avcNalDataLength = 0; + avcFrameDataChain = null; + avcFrameDataLength = 0; } - private void reassembleAvcNal() + private void reassembleAvcFrame(int frameNumber) { - // This is the start of a new NAL - if (avcNalDataChain != null && avcNalDataLength != 0) { + // This is the start of a new frame + if (avcFrameDataChain != null && avcFrameDataLength != 0) { // Construct the H264 
decode unit - DecodeUnit du = new DecodeUnit(DecodeUnit.TYPE_H264, avcNalDataChain, avcNalDataLength, 0); + DecodeUnit du = new DecodeUnit(DecodeUnit.TYPE_H264, avcFrameDataChain, avcFrameDataLength, 0, frameNumber); if (directSubmitDr != null) { // Submit directly to the decoder directSubmitDr.submitDecodeUnit(du); } else if (!decodedUnits.offer(du)) { - // We need a new IDR frame since we're discarding data now System.out.println("Video decoder is too slow! Forced to drop decode units"); + // Invalidate all frames from the start of the DU queue to this frame number + controlListener.connectionSinkTooSlow(decodedUnits.remove().getFrameNumber(), frameNumber); decodedUnits.clear(); - controlListener.connectionSinkTooSlow(); } // Clear old state - clearAvcNalState(); + clearAvcFrameState(); } } @@ -80,11 +83,11 @@ public class VideoDepacketizer { if (NAL.isAvcFrameStart(cachedDesc)) { // Reassemble any pending AVC NAL - reassembleAvcNal(); + reassembleAvcFrame(packet.getFrameIndex()); // Setup state for the new NAL - avcNalDataChain = new LinkedList(); - avcNalDataLength = 0; + avcFrameDataChain = new LinkedList(); + avcFrameDataLength = 0; } // Skip the start sequence @@ -97,7 +100,7 @@ public class VideoDepacketizer { if (currentlyDecoding == DecodeUnit.TYPE_H264 && NAL.isPadding(cachedDesc)) { // The decode unit is complete - reassembleAvcNal(); + reassembleAvcFrame(packet.getFrameIndex()); } // Not decoding AVC @@ -133,13 +136,13 @@ public class VideoDepacketizer { location.length--; } - if (currentlyDecoding == DecodeUnit.TYPE_H264 && avcNalDataChain != null) + if (currentlyDecoding == DecodeUnit.TYPE_H264 && avcFrameDataChain != null) { ByteBufferDescriptor data = new ByteBufferDescriptor(location.data, start, location.offset-start); // Add a buffer descriptor describing the NAL data in this packet - avcNalDataChain.add(data); - avcNalDataLength += location.offset-start; + avcFrameDataChain.add(data); + avcFrameDataLength += location.offset-start; } } } @@ -148,15 +151,13 @@ public class VideoDepacketizer { { if (firstPacket) { // Setup state for the new frame - avcNalDataChain = new LinkedList(); - avcNalDataLength = 0; + avcFrameDataChain = new LinkedList(); + avcFrameDataLength = 0; } // Add the payload data to the chain - if (avcNalDataChain != null) { - avcNalDataChain.add(location); - avcNalDataLength += location.length; - } + avcFrameDataChain.add(location); + avcFrameDataLength += location.length; } public void addInputData(VideoPacket packet) @@ -164,29 +165,87 @@ public class VideoDepacketizer { ByteBufferDescriptor location = packet.getNewPayloadDescriptor(); // Runt packets get decoded using the slow path + // These packets stand alone so there's no need to verify + // sequencing before submitting if (location.length < 968) { addInputDataSlow(packet, location); return; } + int frameIndex = packet.getFrameIndex(); int packetIndex = packet.getPacketIndex(); int packetsInFrame = packet.getTotalPackets(); + // We can use FEC to correct single packet errors + // on single packet frames because we just get a + // duplicate of the original packet + if (packetsInFrame == 1 && packetIndex == 1 && + nextPacketNumber == 0 && frameIndex == nextFrameNumber) { + System.out.println("Using FEC for error correction"); + nextPacketNumber = 1; + } // Discard FEC data early - if (packetIndex >= packetsInFrame) { + else if (packetIndex >= packetsInFrame) { return; } + // Check that this is the next frame + boolean firstPacket = (packet.getFlags() & VideoPacket.FLAG_SOF) != 0; + if (firstPacket 
&& waitingForFrameStart) { + // This is the next frame after a loss event + controlListener.connectionDetectedFrameLoss(startFrameNumber, frameIndex - 1); + startFrameNumber = nextFrameNumber = frameIndex; + nextPacketNumber = 0; + waitingForFrameStart = false; + clearAvcFrameState(); + } + else if (frameIndex > nextFrameNumber) { + // Nope, but we can still work with it if it's + // the start of the next frame + if (firstPacket) { + System.out.println("Got start of frame "+frameIndex+ + " when expecting packet "+nextPacketNumber+ + " of frame "+nextFrameNumber); + controlListener.connectionDetectedFrameLoss(startFrameNumber, frameIndex - 1); + startFrameNumber = nextFrameNumber = frameIndex; + nextPacketNumber = 0; + clearAvcFrameState(); + } + else { + System.out.println("Got packet "+packetIndex+" of frame "+frameIndex+ + " when expecting packet "+nextPacketNumber+ + " of frame "+nextFrameNumber); + // We dropped the start of this frame too, so pick up on the next frame + waitingForFrameStart = true; + return; + } + } + else if (frameIndex < nextFrameNumber) { + System.out.println("Frame "+frameIndex+" is behind our current frame number "+nextFrameNumber); + // Discard the frame silently if it's behind our current sequence number + return; + } + + // We know it's the right frame, now check the packet number + if (packetIndex != nextPacketNumber) { + System.out.println("Frame "+frameIndex+": expected packet "+nextPacketNumber+" but got "+packetIndex); + // At this point, we're guaranteed that it's not FEC data that we lost + waitingForFrameStart = true; + return; + } + + nextPacketNumber++; + // Remove extra padding location.length = packet.getPayloadLength(); - - boolean firstPacket = (packet.getFlags() & VideoPacket.FLAG_SOF) != 0; + if (firstPacket) { if (NAL.getSpecialSequenceDescriptor(location, cachedDesc) && NAL.isAvcFrameStart(cachedDesc) && cachedDesc.data[cachedDesc.offset+cachedDesc.length] == 0x67) { // SPS and PPS prefix is padded between NALs, so we must decode it with the slow path + clearAvcFrameState(); addInputDataSlow(packet, location); return; } @@ -194,32 +253,21 @@ public class VideoDepacketizer { addInputDataFast(packet, location, firstPacket); + // We can't use the EOF flag here because real frames can be split across + // multiple "frames" when packetized to fit under the bandwidth ceiling + if (packetIndex + 1 >= packetsInFrame) { + nextFrameNumber++; + nextPacketNumber = 0; + } + if ((packet.getFlags() & VideoPacket.FLAG_EOF) != 0) { - reassembleAvcNal(); + reassembleAvcFrame(packet.getFrameIndex()); + startFrameNumber = nextFrameNumber; } } public void addInputData(RtpPacket packet) { - short seq = packet.getSequenceNumber(); - - // Toss out the current NAL if we receive a packet that is - // out of sequence - if (lastSequenceNumber != 0 && - (short)(lastSequenceNumber + 1) != seq) - { - System.out.println("Received OOS video data (expected "+(lastSequenceNumber + 1)+", got "+seq+")"); - - // Reset the depacketizer state - clearAvcNalState(); - - // Request an IDR frame - controlListener.connectionDetectedPacketLoss(); - } - - lastSequenceNumber = seq; - - // Pass the payload to the non-sequencing parser ByteBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor(); addInputData(new VideoPacket(rtpPayload)); } diff --git a/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java b/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java index ea473f36..259ef2a2 100644 --- 
a/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java +++ b/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java @@ -8,6 +8,7 @@ import java.net.InetSocketAddress; import java.net.Socket; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.util.concurrent.LinkedBlockingQueue; import com.limelight.nvstream.NvConnectionListener; import com.limelight.nvstream.StreamConfiguration; @@ -60,7 +61,7 @@ public class ControlStream implements ConnectionStatusListener { private Thread heartbeatThread; private Thread jitterThread; private Thread resyncThread; - private Object resyncNeeded = new Object(); + private LinkedBlockingQueue invalidReferenceFrameTuples = new LinkedBlockingQueue(); private boolean aborting = false; public ControlStream(InetAddress host, NvConnectionListener listener, StreamConfiguration streamConfig) @@ -140,12 +141,6 @@ public class ControlStream implements ConnectionStatusListener { } } - public void requestResync() throws IOException - { - System.out.println("CTL: Requesting IDR frame"); - sendResync(); - } - public void start() throws IOException { // Use a finite timeout during the handshake process @@ -188,18 +183,36 @@ public class ControlStream implements ConnectionStatusListener { public void run() { while (!isInterrupted()) { + int[] tuple; + + // Wait for a tuple try { - // Wait for notification of a resync needed - synchronized (resyncNeeded) { - resyncNeeded.wait(); - } + tuple = invalidReferenceFrameTuples.take(); } catch (InterruptedException e) { listener.connectionTerminated(e); return; } + // Aggregate all lost frames into one range + int[] lastTuple = null; + for (;;) { + int[] nextTuple = lastTuple = invalidReferenceFrameTuples.poll(); + if (nextTuple == null) { + break; + } + + lastTuple = nextTuple; + } + + // Update the end of the range to the latest tuple + if (lastTuple != null) { + tuple[1] = lastTuple[1]; + } + try { - requestResync(); + System.err.println("Invalidating reference frames from "+tuple[0]+" to "+tuple[1]); + ControlStream.this.sendResync(tuple[0], tuple[1]); + System.err.println("Frames invalidated"); } catch (IOException e) { listener.connectionTerminated(e); return; @@ -243,12 +256,14 @@ public class ControlStream implements ConnectionStatusListener { return sendAndGetReply(new NvCtlPacket(PTYPE_1405, PPAYLEN_1405)); } - private void sendResync() throws IOException + private void sendResync(int firstLostFrame, int lastLostFrame) throws IOException { ByteBuffer conf = ByteBuffer.wrap(new byte[PPAYLEN_RESYNC]).order(ByteOrder.LITTLE_ENDIAN); conf.putLong(0); conf.putLong(0xFFFFF); + //conf.putLong(firstLostFrame); + //conf.putLong(lastLostFrame); sendAndGetReply(new NvCtlPacket(PTYPE_RESYNC, PPAYLEN_RESYNC, conf.array())); } @@ -413,14 +428,11 @@ public class ControlStream implements ConnectionStatusListener { abort(); } - private void resyncConnection() { - synchronized (resyncNeeded) { - // Wake up the resync thread - resyncNeeded.notify(); - } + private void resyncConnection(int firstLostFrame, int lastLostFrame) { + invalidReferenceFrameTuples.add(new int[]{firstLostFrame, lastLostFrame}); } - public void connectionDetectedPacketLoss() { + public void connectionDetectedFrameLoss(int firstLostFrame, int lastLostFrame) { if (System.currentTimeMillis() > LOSS_PERIOD_MS + lossTimestamp) { lossCount++; lossTimestamp = System.currentTimeMillis(); @@ -433,15 +445,15 @@ public class ControlStream implements ConnectionStatusListener { } } - resyncConnection(); + 
resyncConnection(firstLostFrame, lastLostFrame); } - public void connectionSinkTooSlow() { + public void connectionSinkTooSlow(int firstLostFrame, int lastLostFrame) { if (++slowSinkCount == MAX_SLOW_SINK_COUNT) { listener.displayTransientMessage("Your device is processing the A/V data too slowly. Try lowering stream settings."); slowSinkCount = -MAX_SLOW_SINK_COUNT * MESSAGE_DELAY_FACTOR; } - resyncConnection(); + resyncConnection(firstLostFrame, lastLostFrame); } } From 4fbe93e62dc429f3913a77fb4f50cae5407d1810 Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Wed, 26 Feb 2014 01:00:17 -0500 Subject: [PATCH 15/17] Set flags on the decode units that indicate what type of data the frame contains --- .../com/limelight/nvstream/av/DecodeUnit.java | 3 +++ .../nvstream/av/video/VideoDepacketizer.java | 19 ++++++++++++++++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/moonlight-common/src/com/limelight/nvstream/av/DecodeUnit.java b/moonlight-common/src/com/limelight/nvstream/av/DecodeUnit.java index 369de8da..b09ffbf5 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/DecodeUnit.java +++ b/moonlight-common/src/com/limelight/nvstream/av/DecodeUnit.java @@ -7,6 +7,9 @@ public class DecodeUnit { public static final int TYPE_H264 = 1; public static final int TYPE_OPUS = 2; + public static final int DU_FLAG_CODEC_CONFIG = 0x1; + public static final int DU_FLAG_SYNC_FRAME = 0x2; + private int type; private List bufferList; private int dataLength; diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index 7a8a867a..c1ef5dd4 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -46,8 +46,25 @@ public class VideoDepacketizer { { // This is the start of a new frame if (avcFrameDataChain != null && avcFrameDataLength != 0) { + int flags = 0; + + ByteBufferDescriptor firstBuffer = avcFrameDataChain.getFirst(); + + if (NAL.getSpecialSequenceDescriptor(firstBuffer, cachedDesc) && NAL.isAvcFrameStart(cachedDesc)) { + switch (cachedDesc.data[cachedDesc.offset+cachedDesc.length]) { + case 0x67: + case 0x68: + flags |= DecodeUnit.DU_FLAG_CODEC_CONFIG; + break; + + case 0x65: + flags |= DecodeUnit.DU_FLAG_SYNC_FRAME; + break; + } + } + // Construct the H264 decode unit - DecodeUnit du = new DecodeUnit(DecodeUnit.TYPE_H264, avcFrameDataChain, avcFrameDataLength, 0, frameNumber); + DecodeUnit du = new DecodeUnit(DecodeUnit.TYPE_H264, avcFrameDataChain, avcFrameDataLength, flags, frameNumber); if (directSubmitDr != null) { // Submit directly to the decoder directSubmitDr.submitDecodeUnit(du); From c733be56113f924dd1f80bbe849038ac7ebc61bc Mon Sep 17 00:00:00 2001 From: Iwan Timmer Date: Wed, 26 Feb 2014 16:17:25 +0100 Subject: [PATCH 16/17] Add Javadoc about config tuples --- .../limelight/nvstream/control/Config.java | 140 +++++++++--------- 1 file changed, 70 insertions(+), 70 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/control/Config.java b/moonlight-common/src/com/limelight/nvstream/control/Config.java index ac906e5e..d975a44d 100644 --- a/moonlight-common/src/com/limelight/nvstream/control/Config.java +++ b/moonlight-common/src/com/limelight/nvstream/control/Config.java @@ -10,63 +10,63 @@ public class Config { public static final ConfigTuple[] CONFIG_720_60 = { - new ByteConfigTuple((short)0x1207, (byte)1), - new 
IntConfigTuple((short)0x120b, 7), - new IntConfigTuple((short)0x120c, 7), - new IntConfigTuple((short)0x120d, 60), - new IntConfigTuple((short)0x120e, 100), - new IntConfigTuple((short)0x120f, 5), - new IntConfigTuple((short)0x1210, 4), - new IntConfigTuple((short)0x1202, 1024), - new ByteConfigTuple((short)0x1203, (byte)0), - new ByteConfigTuple((short)0x1201, (byte)0), - new ByteConfigTuple((short)0x1234, (byte)0), + new ByteConfigTuple((short)0x1207, (byte)1), //iFrameOnDemand + new IntConfigTuple((short)0x120b, 7), //averageBitrate + new IntConfigTuple((short)0x120c, 7), //peakBitrate + new IntConfigTuple((short)0x120d, 60), //gopLength + new IntConfigTuple((short)0x120e, 100), //vbvMultiplier + new IntConfigTuple((short)0x120f, 5), //rateControlMode + new IntConfigTuple((short)0x1210, 4), //slicesPerFrame + new IntConfigTuple((short)0x1202, 1024), //packetSize + new ByteConfigTuple((short)0x1203, (byte)0), //recordServerStats + new ByteConfigTuple((short)0x1201, (byte)0), //serverCapture + new ByteConfigTuple((short)0x1234, (byte)0), //serverNetworkCapture new ByteConfigTuple((short)0x1248, (byte)0), - new ByteConfigTuple((short)0x1208, (byte)1), - new ByteConfigTuple((short)0x1209, (byte)0), - new IntConfigTuple((short)0x1212, 3000), - new IntConfigTuple((short)0x1238, 10000), - new ByteConfigTuple((short)0x1211, (byte)0), - new ByteConfigTuple((short)0x1213, (byte)1), - new IntConfigTuple((short)0x1214, 50), - new IntConfigTuple((short)0x1215, 60), - new IntConfigTuple((short)0x1216, 20), - new IntConfigTuple((short)0x1217, 0), - new IntConfigTuple((short)0x1218, 8), - new IntConfigTuple((short)0x1219, 10), - new IntConfigTuple((short)0x121a, 311), - new IntConfigTuple((short)0x121b, 10000), - new IntConfigTuple((short)0x121c, 2000), - new IntConfigTuple((short)0x121d, 50), - new IntConfigTuple((short)0x121e, 3000), - new IntConfigTuple((short)0x121f, 2), - new IntConfigTuple((short)0x122a, 5000), - new IntConfigTuple((short)0x122b, 500), - new IntConfigTuple((short)0x1220, 75), - new IntConfigTuple((short)0x1221, 25), - new IntConfigTuple((short)0x1222, 10), - new IntConfigTuple((short)0x1223, 60), - new IntConfigTuple((short)0x1224, 30), - new IntConfigTuple((short)0x1225, 3), - new IntConfigTuple((short)0x1226, 1000), - new IntConfigTuple((short)0x1227, 5000), - new IntConfigTuple((short)0x1228, 5000), - new IntConfigTuple((short)0x124e, 110), - new IntConfigTuple((short)0x1237, 10), - new IntConfigTuple((short)0x1236, 6), - new IntConfigTuple((short)0x1235, 4), - new IntConfigTuple((short)0x1242, 20000), + new ByteConfigTuple((short)0x1208, (byte)1), //refPicInvalidation + new ByteConfigTuple((short)0x1209, (byte)0), //enableFrameRateCtrl + new IntConfigTuple((short)0x1212, 3000), //pingBackIntervalMs + new IntConfigTuple((short)0x1238, 10000), //pingBackTimeoutMs + new ByteConfigTuple((short)0x1211, (byte)0), //enableSubframeEncoding + new ByteConfigTuple((short)0x1213, (byte)1), //videoQoSFecEnable + new IntConfigTuple((short)0x1214, 50), //videoQoSFecNumSrcPackets + new IntConfigTuple((short)0x1215, 60), //videoQoSFecNumOutPackets + new IntConfigTuple((short)0x1216, 20), //videoQoSFecRepairPercent + new IntConfigTuple((short)0x1217, 0), //videoQoSTsEnable + new IntConfigTuple((short)0x1218, 8), //videoQoSTsAverageBitrate + new IntConfigTuple((short)0x1219, 10), //videoQoSTsMaximumBitrate + new IntConfigTuple((short)0x121a, 311), //videoQoSBwFlags + new IntConfigTuple((short)0x121b, 10000), //videoQoSBwMaximumBitrate + new IntConfigTuple((short)0x121c, 2000), 
//videoQoSBwMinimumBitrate + new IntConfigTuple((short)0x121d, 50), //videoQoSBwStatsTime + new IntConfigTuple((short)0x121e, 3000), //videoQoSBwZeroLossCount + new IntConfigTuple((short)0x121f, 2), //videoQoSBwLossThreshold + new IntConfigTuple((short)0x122a, 5000), //videoQoSBwOwdThreshold + new IntConfigTuple((short)0x122b, 500), //videoQoSBwOwdReference + new IntConfigTuple((short)0x1220, 75), //videoQoSBwLossWaitTime + new IntConfigTuple((short)0x1221, 25), //videoQoSBwRateDropMultiplier + new IntConfigTuple((short)0x1222, 10), //videoQoSBwRateGainMultiplier + new IntConfigTuple((short)0x1223, 60), //videoQoSBwMaxFps + new IntConfigTuple((short)0x1224, 30), //videoQoSBwMinFps + new IntConfigTuple((short)0x1225, 3), //videoQoSBwFpsThreshold + new IntConfigTuple((short)0x1226, 1000), //videoQoSBwJitterThreshold + new IntConfigTuple((short)0x1227, 5000), //videoQoSBwJitterWaitTime + new IntConfigTuple((short)0x1228, 5000), //videoQoSBwNoJitterWaitTime + new IntConfigTuple((short)0x124e, 110), + new IntConfigTuple((short)0x1237, 10), //videoQoSBwEarlyDetectionEnableL1Threshold + new IntConfigTuple((short)0x1236, 6), //videoQoSBwEarlyDetectionEnableL0Threshold + new IntConfigTuple((short)0x1235, 4), //videoQoSBwEarlyDetectionDisableThreshold + new IntConfigTuple((short)0x1242, 20000), //videoQoSBwEarlyDetectionWaitTime new IntConfigTuple((short)0x1244, 100), new IntConfigTuple((short)0x1245, 1000), new IntConfigTuple((short)0x1246, 720), new IntConfigTuple((short)0x1247, 480), - new IntConfigTuple((short)0x1229, 5000), - new ByteConfigTuple((short)0x122e, (byte)7), - new IntConfigTuple((short)0x1231, 40), - new IntConfigTuple((short)0x1232, 25), - new IntConfigTuple((short)0x1233, 3000), - new IntConfigTuple((short)0x122c, 3), - new IntConfigTuple((short)0x122d, 10), + new IntConfigTuple((short)0x1229, 5000), //videoQosVideoQualityScoreUpdateTime + new ByteConfigTuple((short)0x122e, (byte)7), //videoQosTrafficType + new IntConfigTuple((short)0x1231, 40), //videoQosBnNotifyUpBoundThreshold + new IntConfigTuple((short)0x1232, 25), //videoQosBnNotifyLowBoundThreshold + new IntConfigTuple((short)0x1233, 3000), //videoQosBnNotifyWaitTime + new IntConfigTuple((short)0x122c, 3), //videoQosInvalidateThreshold + new IntConfigTuple((short)0x122d, 10), //videoQosInvalidateSkipPercentage /*new IntConfigTuple((short)0x123b, 12), new IntConfigTuple((short)0x123c, 3), new IntConfigTuple((short)0x1249, 0), @@ -74,22 +74,22 @@ public class Config { new IntConfigTuple((short)0x124b, 5000), new IntConfigTuple((short)0x124c, 6000), new IntConfigTuple((short)0x124d, 1000),*/ - new IntConfigTuple((short)0x122f, 0), - new ShortConfigTuple((short)0x1230, (short)0), - new IntConfigTuple((short)0x1239, 0), - new IntConfigTuple((short)0x123a, 0), - new IntConfigTuple((short)0x123d, 96000), - new IntConfigTuple((short)0x123e, 5), - new IntConfigTuple((short)0x123f, 1), - new IntConfigTuple((short)0x1243, 100) + new IntConfigTuple((short)0x122f, 0), //riSecurityProtocol + new ShortConfigTuple((short)0x1230, (short)0), //riSecInfoUsePredefinedCert + new IntConfigTuple((short)0x1239, 0), //videoFrameDropIntervalNumber + new IntConfigTuple((short)0x123a, 0), //videoFrameDropContinualNumber + new IntConfigTuple((short)0x123d, 96000), //audioQosBitRate + new IntConfigTuple((short)0x123e, 5), //audioQosPacketDuration + new IntConfigTuple((short)0x123f, 1), //audioQosEnablePacketLossPercentage + new IntConfigTuple((short)0x1243, 100) //audioQosPacketLossPercentageUpdateInterval }; public static final ConfigTuple[] 
CONFIG_1080_30_DIFF = { - new IntConfigTuple((short)0x120b, 10), - new IntConfigTuple((short)0x120c, 10), - new IntConfigTuple((short)0x121c, 4000), - new IntConfigTuple((short)0x1245, 3000), + new IntConfigTuple((short)0x120b, 10), //averageBitrate + new IntConfigTuple((short)0x120c, 10), //peakBitrate + new IntConfigTuple((short)0x121c, 4000), //videoQoSBwMinimumBitrate + new IntConfigTuple((short)0x1245, 3000), new IntConfigTuple((short)0x1246, 1280), new IntConfigTuple((short)0x1247, 720), /*new IntConfigTuple((short)0x124a, 5000), @@ -98,11 +98,11 @@ public class Config { public static final ConfigTuple[] CONFIG_1080_60_DIFF = { - new IntConfigTuple((short)0x120b, 30), - new IntConfigTuple((short)0x120c, 30), - new IntConfigTuple((short)0x120f, 4), - new IntConfigTuple((short)0x121b, 30000), - new IntConfigTuple((short)0x121c, 25000), + new IntConfigTuple((short)0x120b, 30), //averageBitrate + new IntConfigTuple((short)0x120c, 30), //peakBitrate + new IntConfigTuple((short)0x120f, 4), //rateControlMode + new IntConfigTuple((short)0x121b, 30000), //videoQoSBwMaximumBitrate + new IntConfigTuple((short)0x121c, 25000), //videoQoSBwMinimumBitrate new IntConfigTuple((short)0x1245, 3000), new IntConfigTuple((short)0x1246, 1280), new IntConfigTuple((short)0x1247, 720), @@ -154,7 +154,7 @@ public class Config { tupleSet.add(new IntConfigTuple((short)0x1204, streamConfig.getWidth())); tupleSet.add(new IntConfigTuple((short)0x1205, streamConfig.getHeight())); - tupleSet.add(new IntConfigTuple((short)0x1206, 1)); + tupleSet.add(new IntConfigTuple((short)0x1206, 1)); //videoTransferProtocol tupleSet.add(new IntConfigTuple((short)0x120A, streamConfig.getRefreshRate())); // Start with the initial config for 720p60 From e60420cb2c79827e96a4554132ba77eb456ecdad Mon Sep 17 00:00:00 2001 From: Cameron Gutman Date: Wed, 26 Feb 2014 12:12:06 -0500 Subject: [PATCH 17/17] Update depacketizer to do reference frame invalidation more like the official streamer. This should reduce the frequency of IDR requests by waiting for network stabilization before requesting the IDR frames. We still request IDR frames because reference frame invalidation still doesn't work well. --- .../nvstream/av/video/VideoDepacketizer.java | 54 ++++++++++++------- .../nvstream/control/ControlStream.java | 21 ++++---- 2 files changed, 48 insertions(+), 27 deletions(-) diff --git a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java index c1ef5dd4..7d747055 100644 --- a/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java +++ b/moonlight-common/src/com/limelight/nvstream/av/video/VideoDepacketizer.java @@ -19,7 +19,7 @@ public class VideoDepacketizer { private int nextFrameNumber = 1; private int nextPacketNumber; private int startFrameNumber = 1; - private boolean waitingForFrameStart; + private boolean waitingForNextSuccessfulFrame; // Cached objects private ByteBufferDescriptor cachedDesc = new ByteBufferDescriptor(null, 0, 0); @@ -41,7 +41,7 @@ public class VideoDepacketizer { avcFrameDataChain = null; avcFrameDataLength = 0; } - + private void reassembleAvcFrame(int frameNumber) { // This is the start of a new frame @@ -71,9 +71,15 @@ public class VideoDepacketizer { } else if (!decodedUnits.offer(du)) { System.out.println("Video decoder is too slow! 
Forced to drop decode units"); - // Invalidate all frames from the start of the DU queue to this frame number + + // Invalidate all frames from the start of the DU queue controlListener.connectionSinkTooSlow(decodedUnits.remove().getFrameNumber(), frameNumber); + + // Remove existing frames decodedUnits.clear(); + + // Add this frame + decodedUnits.add(du); } // Clear old state @@ -201,39 +207,39 @@ public class VideoDepacketizer { System.out.println("Using FEC for error correction"); nextPacketNumber = 1; } - // Discard FEC data early + // Discard the rest of the FEC data until we know how to use it else if (packetIndex >= packetsInFrame) { return; } // Check that this is the next frame boolean firstPacket = (packet.getFlags() & VideoPacket.FLAG_SOF) != 0; - if (firstPacket && waitingForFrameStart) { - // This is the next frame after a loss event - controlListener.connectionDetectedFrameLoss(startFrameNumber, frameIndex - 1); - startFrameNumber = nextFrameNumber = frameIndex; - nextPacketNumber = 0; - waitingForFrameStart = false; - clearAvcFrameState(); - } - else if (frameIndex > nextFrameNumber) { + if (frameIndex > nextFrameNumber) { // Nope, but we can still work with it if it's // the start of the next frame if (firstPacket) { System.out.println("Got start of frame "+frameIndex+ " when expecting packet "+nextPacketNumber+ " of frame "+nextFrameNumber); - controlListener.connectionDetectedFrameLoss(startFrameNumber, frameIndex - 1); - startFrameNumber = nextFrameNumber = frameIndex; + nextFrameNumber = frameIndex; nextPacketNumber = 0; clearAvcFrameState(); + + // Tell the encoder when we're done decoding this frame + // that we lost some previous frames + waitingForNextSuccessfulFrame = true; } else { System.out.println("Got packet "+packetIndex+" of frame "+frameIndex+ " when expecting packet "+nextPacketNumber+ " of frame "+nextFrameNumber); - // We dropped the start of this frame too, so pick up on the next frame - waitingForFrameStart = true; + // We dropped the start of this frame too + waitingForNextSuccessfulFrame = true; + + // Try to pickup on the next frame + nextFrameNumber = frameIndex + 1; + nextPacketNumber = 0; + clearAvcFrameState(); return; } } @@ -247,7 +253,12 @@ public class VideoDepacketizer { if (packetIndex != nextPacketNumber) { System.out.println("Frame "+frameIndex+": expected packet "+nextPacketNumber+" but got "+packetIndex); // At this point, we're guaranteed that it's not FEC data that we lost - waitingForFrameStart = true; + waitingForNextSuccessfulFrame = true; + + // Skip this frame + nextFrameNumber++; + nextPacketNumber = 0; + clearAvcFrameState(); return; } @@ -279,6 +290,13 @@ public class VideoDepacketizer { if ((packet.getFlags() & VideoPacket.FLAG_EOF) != 0) { reassembleAvcFrame(packet.getFrameIndex()); + + if (waitingForNextSuccessfulFrame) { + // This is the next successful frame after a loss event + controlListener.connectionDetectedFrameLoss(startFrameNumber, nextFrameNumber - 1); + waitingForNextSuccessfulFrame = false; + } + startFrameNumber = nextFrameNumber; } } diff --git a/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java b/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java index 259ef2a2..dc6bab60 100644 --- a/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java +++ b/moonlight-common/src/com/limelight/nvstream/control/ControlStream.java @@ -204,6 +204,9 @@ public class ControlStream implements ConnectionStatusListener { lastTuple = nextTuple; } + // The server expects this 
to be the firstLostFrame + 1 + tuple[0]++; + // Update the end of the range to the latest tuple if (lastTuple != null) { tuple[1] = lastTuple[1]; @@ -256,14 +259,14 @@ public class ControlStream implements ConnectionStatusListener { return sendAndGetReply(new NvCtlPacket(PTYPE_1405, PPAYLEN_1405)); } - private void sendResync(int firstLostFrame, int lastLostFrame) throws IOException + private void sendResync(int firstLostFrame, int nextSuccessfulFrame) throws IOException { ByteBuffer conf = ByteBuffer.wrap(new byte[PPAYLEN_RESYNC]).order(ByteOrder.LITTLE_ENDIAN); + //conf.putLong(firstLostFrame); + //conf.putLong(nextSuccessfulFrame); conf.putLong(0); conf.putLong(0xFFFFF); - //conf.putLong(firstLostFrame); - //conf.putLong(lastLostFrame); sendAndGetReply(new NvCtlPacket(PTYPE_RESYNC, PPAYLEN_RESYNC, conf.array())); } @@ -428,11 +431,11 @@ public class ControlStream implements ConnectionStatusListener { abort(); } - private void resyncConnection(int firstLostFrame, int lastLostFrame) { - invalidReferenceFrameTuples.add(new int[]{firstLostFrame, lastLostFrame}); + private void resyncConnection(int firstLostFrame, int nextSuccessfulFrame) { + invalidReferenceFrameTuples.add(new int[]{firstLostFrame, nextSuccessfulFrame}); } - public void connectionDetectedFrameLoss(int firstLostFrame, int lastLostFrame) { + public void connectionDetectedFrameLoss(int firstLostFrame, int nextSuccessfulFrame) { if (System.currentTimeMillis() > LOSS_PERIOD_MS + lossTimestamp) { lossCount++; lossTimestamp = System.currentTimeMillis(); @@ -445,15 +448,15 @@ public class ControlStream implements ConnectionStatusListener { } } - resyncConnection(firstLostFrame, lastLostFrame); + resyncConnection(firstLostFrame, nextSuccessfulFrame); } - public void connectionSinkTooSlow(int firstLostFrame, int lastLostFrame) { + public void connectionSinkTooSlow(int firstLostFrame, int nextSuccessfulFrame) { if (++slowSinkCount == MAX_SLOW_SINK_COUNT) { listener.displayTransientMessage("Your device is processing the A/V data too slowly. Try lowering stream settings."); slowSinkCount = -MAX_SLOW_SINK_COUNT * MESSAGE_DELAY_FACTOR; } - resyncConnection(firstLostFrame, lastLostFrame); + resyncConnection(firstLostFrame, nextSuccessfulFrame); } }
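
For reference, the range coalescing performed by the resync thread in
ControlStream.java can be sketched as below. The names (ResyncWorker,
queueLostRange, sendInvalidation) are invented, and sendInvalidation is only a
placeholder for sendResync(). Note that the aggregation loop in the patch
assigns the result of poll() to lastTuple on every iteration, including the
final null that breaks the loop, so the "if (lastTuple != null)" update of the
range end never fires; the sketch therefore keeps the last non-null tuple
instead.

// Illustrative sketch only; not the actual ControlStream implementation
import java.io.IOException;
import java.util.concurrent.LinkedBlockingQueue;

class ResyncWorker implements Runnable {
	private final LinkedBlockingQueue<int[]> lostRanges =
			new LinkedBlockingQueue<int[]>();

	// Called from the depacketizer callbacks, e.g. connectionDetectedFrameLoss()
	void queueLostRange(int firstLostFrame, int nextSuccessfulFrame) {
		lostRanges.add(new int[] { firstLostFrame, nextSuccessfulFrame });
	}

	@Override
	public void run() {
		try {
			while (!Thread.currentThread().isInterrupted()) {
				int[] range = lostRanges.take();   // block until a loss event arrives

				// Coalesce anything that queued up while we were blocked or busy,
				// keeping the end of the newest range
				int[] next;
				while ((next = lostRanges.poll()) != null) {
					range[1] = next[1];
				}

				// The server expects the start of the range to be firstLostFrame + 1
				sendInvalidation(range[0] + 1, range[1]);
			}
		} catch (InterruptedException e) {
			// stream is shutting down
		} catch (IOException e) {
			// control connection failed; the real code terminates the connection
		}
	}

	// Placeholder for the control stream message (sendResync() in the patch)
	private void sendInvalidation(int firstFrame, int lastFrame) throws IOException {
		System.out.println("Invalidating reference frames " + firstFrame + " to " + lastFrame);
	}
}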