Merge branch 'master' into logs

Conflicts:
	src/com/limelight/nvstream/av/audio/AudioDepacketizer.java
	src/com/limelight/nvstream/av/video/VideoDepacketizer.java
	src/com/limelight/nvstream/control/ControlStream.java
This commit is contained in:
Cameron Gutman 2014-02-26 16:22:04 -05:00
commit 50e7deeb32
16 changed files with 608 additions and 248 deletions

View File

@ -29,4 +29,5 @@ public interface NvConnectionListener {
public void connectionTerminated(Exception e); public void connectionTerminated(Exception e);
public void displayMessage(String message); public void displayMessage(String message);
public void displayTransientMessage(String message);
} }

View File

@ -3,5 +3,7 @@ package com.limelight.nvstream.av;
public interface ConnectionStatusListener { public interface ConnectionStatusListener {
public void connectionTerminated(); public void connectionTerminated();
public void connectionNeedsResync(); public void connectionDetectedFrameLoss(int firstLostFrame, int lastLostFrame);
public void connectionSinkTooSlow(int firstLostFrame, int lastLostFrame);
} }

View File

@ -7,17 +7,22 @@ public class DecodeUnit {
public static final int TYPE_H264 = 1; public static final int TYPE_H264 = 1;
public static final int TYPE_OPUS = 2; public static final int TYPE_OPUS = 2;
public static final int DU_FLAG_CODEC_CONFIG = 0x1;
public static final int DU_FLAG_SYNC_FRAME = 0x2;
private int type; private int type;
private List<ByteBufferDescriptor> bufferList; private List<ByteBufferDescriptor> bufferList;
private int dataLength; private int dataLength;
private int flags; private int flags;
private int frameNumber;
public DecodeUnit(int type, List<ByteBufferDescriptor> bufferList, int dataLength, int flags) public DecodeUnit(int type, List<ByteBufferDescriptor> bufferList, int dataLength, int flags, int frameNumber)
{ {
this.type = type; this.type = type;
this.bufferList = bufferList; this.bufferList = bufferList;
this.dataLength = dataLength; this.dataLength = dataLength;
this.flags = flags; this.flags = flags;
this.frameNumber = frameNumber;
} }
public int getType() public int getType()
@ -39,4 +44,9 @@ public class DecodeUnit {
{ {
return dataLength; return dataLength;
} }
public int getFrameNumber()
{
return frameNumber;
}
} }

View File

@ -12,9 +12,16 @@ public class AudioDepacketizer {
private LinkedBlockingQueue<ByteBufferDescriptor> decodedUnits = private LinkedBlockingQueue<ByteBufferDescriptor> decodedUnits =
new LinkedBlockingQueue<ByteBufferDescriptor>(DU_LIMIT); new LinkedBlockingQueue<ByteBufferDescriptor>(DU_LIMIT);
private AudioRenderer directSubmitRenderer;
// Sequencing state // Sequencing state
private short lastSequenceNumber; private short lastSequenceNumber;
public AudioDepacketizer(AudioRenderer directSubmitRenderer)
{
this.directSubmitRenderer = directSubmitRenderer;
}
private void decodeData(byte[] data, int off, int len) private void decodeData(byte[] data, int off, int len)
{ {
// Submit this data to the decoder // Submit this data to the decoder
@ -25,8 +32,10 @@ public class AudioDepacketizer {
// Return value of decode is frames (shorts) decoded per channel // Return value of decode is frames (shorts) decoded per channel
decodeLen *= 2*OpusDecoder.getChannelCount(); decodeLen *= 2*OpusDecoder.getChannelCount();
// Put it on the decoded queue if (directSubmitRenderer != null) {
if (!decodedUnits.offer(new ByteBufferDescriptor(pcmData, 0, decodeLen))) { directSubmitRenderer.playDecodedAudio(pcmData, 0, decodeLen);
}
else if (!decodedUnits.offer(new ByteBufferDescriptor(pcmData, 0, decodeLen))) {
LimeLog.warning("Audio player too slow! Forced to drop decoded samples"); LimeLog.warning("Audio player too slow! Forced to drop decoded samples");
// Clear out the queue // Clear out the queue
decodedUnits.clear(); decodedUnits.clear();

View File

@ -1,6 +1,11 @@
package com.limelight.nvstream.av.audio; package com.limelight.nvstream.av.audio;
public interface AudioRenderer { public interface AudioRenderer {
// playDecodedAudio() is lightweight, so don't use an extra thread for playback
public static final int CAPABILITY_DIRECT_SUBMIT = 0x1;
public int getCapabilities();
public void streamInitialized(int channelCount, int sampleRate); public void streamInitialized(int channelCount, int sampleRate);
public void playDecodedAudio(byte[] audioData, int offset, int length); public void playDecodedAudio(byte[] audioData, int offset, int length);

View File

@ -20,7 +20,7 @@ public class AudioStream {
private DatagramSocket rtp; private DatagramSocket rtp;
private AudioDepacketizer depacketizer = new AudioDepacketizer(); private AudioDepacketizer depacketizer;
private LinkedList<Thread> threads = new LinkedList<Thread>(); private LinkedList<Thread> threads = new LinkedList<Thread>();
@ -74,7 +74,9 @@ public class AudioStream {
startReceiveThread(); startReceiveThread();
if ((streamListener.getCapabilities() & AudioRenderer.CAPABILITY_DIRECT_SUBMIT) == 0) {
startDecoderThread(); startDecoderThread();
}
startUdpPingThread(); startUdpPingThread();
} }
@ -97,6 +99,13 @@ public class AudioStream {
} }
streamListener.streamInitialized(OpusDecoder.getChannelCount(), OpusDecoder.getSampleRate()); streamListener.streamInitialized(OpusDecoder.getChannelCount(), OpusDecoder.getSampleRate());
if ((streamListener.getCapabilities() & AudioRenderer.CAPABILITY_DIRECT_SUBMIT) != 0) {
depacketizer = new AudioDepacketizer(streamListener);
}
else {
depacketizer = new AudioDepacketizer(null);
}
} }
private void startDecoderThread() private void startDecoderThread()

View File

@ -7,6 +7,11 @@ public interface VideoDecoderRenderer {
public static final int FLAG_FORCE_HARDWARE_DECODING = 0x2; public static final int FLAG_FORCE_HARDWARE_DECODING = 0x2;
public static final int FLAG_FORCE_SOFTWARE_DECODING = 0x4; public static final int FLAG_FORCE_SOFTWARE_DECODING = 0x4;
// SubmitDecodeUnit() is lightweight, so don't use an extra thread for decoding
public static final int CAPABILITY_DIRECT_SUBMIT = 0x1;
public int getCapabilities();
public void setup(int width, int height, int redrawRate, Object renderTarget, int drFlags); public void setup(int width, int height, int redrawRate, Object renderTarget, int drFlags);
public void start(); public void start();

View File

@ -11,112 +11,85 @@ import com.limelight.nvstream.av.ConnectionStatusListener;
public class VideoDepacketizer { public class VideoDepacketizer {
// Current NAL state // Current frame state
private LinkedList<ByteBufferDescriptor> avcNalDataChain = null; private LinkedList<ByteBufferDescriptor> avcFrameDataChain = null;
private int avcNalDataLength = 0; private int avcFrameDataLength = 0;
private int currentlyDecoding = DecodeUnit.TYPE_UNKNOWN; private int currentlyDecoding = DecodeUnit.TYPE_UNKNOWN;
// Sequencing state // Sequencing state
private short lastSequenceNumber; private int nextFrameNumber = 1;
private int nextPacketNumber;
private int startFrameNumber = 1;
private boolean waitingForNextSuccessfulFrame;
// Cached objects // Cached objects
private ByteBufferDescriptor cachedDesc = new ByteBufferDescriptor(null, 0, 0); private ByteBufferDescriptor cachedDesc = new ByteBufferDescriptor(null, 0, 0);
private ConnectionStatusListener controlListener; private ConnectionStatusListener controlListener;
private VideoDecoderRenderer directSubmitDr;
private static final int DU_LIMIT = 15; private static final int DU_LIMIT = 15;
private LinkedBlockingQueue<DecodeUnit> decodedUnits = new LinkedBlockingQueue<DecodeUnit>(DU_LIMIT); private LinkedBlockingQueue<DecodeUnit> decodedUnits = new LinkedBlockingQueue<DecodeUnit>(DU_LIMIT);
public VideoDepacketizer(ConnectionStatusListener controlListener) public VideoDepacketizer(VideoDecoderRenderer directSubmitDr, ConnectionStatusListener controlListener)
{ {
this.directSubmitDr = directSubmitDr;
this.controlListener = controlListener; this.controlListener = controlListener;
} }
private void clearAvcNalState() private void clearAvcFrameState()
{ {
avcNalDataChain = null; avcFrameDataChain = null;
avcNalDataLength = 0; avcFrameDataLength = 0;
} }
private void reassembleAvcNal() private void reassembleAvcFrame(int frameNumber)
{ {
// This is the start of a new NAL // This is the start of a new frame
if (avcNalDataChain != null && avcNalDataLength != 0) { if (avcFrameDataChain != null && avcFrameDataLength != 0) {
int flags = 0;
ByteBufferDescriptor firstBuffer = avcFrameDataChain.getFirst();
if (NAL.getSpecialSequenceDescriptor(firstBuffer, cachedDesc) && NAL.isAvcFrameStart(cachedDesc)) {
switch (cachedDesc.data[cachedDesc.offset+cachedDesc.length]) {
case 0x67:
case 0x68:
flags |= DecodeUnit.DU_FLAG_CODEC_CONFIG;
break;
case 0x65:
flags |= DecodeUnit.DU_FLAG_SYNC_FRAME;
break;
}
}
// Construct the H264 decode unit // Construct the H264 decode unit
DecodeUnit du = new DecodeUnit(DecodeUnit.TYPE_H264, avcNalDataChain, avcNalDataLength, 0); DecodeUnit du = new DecodeUnit(DecodeUnit.TYPE_H264, avcFrameDataChain, avcFrameDataLength, flags, frameNumber);
if (!decodedUnits.offer(du)) { if (directSubmitDr != null) {
// We need a new IDR frame since we're discarding data now // Submit directly to the decoder
directSubmitDr.submitDecodeUnit(du);
}
else if (!decodedUnits.offer(du)) {
LimeLog.warning("Video decoder is too slow! Forced to drop decode units"); LimeLog.warning("Video decoder is too slow! Forced to drop decode units");
// Invalidate all frames from the start of the DU queue
controlListener.connectionSinkTooSlow(decodedUnits.remove().getFrameNumber(), frameNumber);
// Remove existing frames
decodedUnits.clear(); decodedUnits.clear();
controlListener.connectionNeedsResync();
// Add this frame
decodedUnits.add(du);
} }
// Clear old state // Clear old state
clearAvcNalState(); clearAvcFrameState();
} }
} }
/* Currently unused pending bugfixes */ public void addInputDataSlow(VideoPacket packet, ByteBufferDescriptor location)
public void addInputDataO1(VideoPacket packet)
{ {
ByteBufferDescriptor location = packet.getNewPayloadDescriptor();
// SPS and PPS packet doesn't have standard headers, so submit it as is
if (location.length < 968) {
avcNalDataChain = new LinkedList<ByteBufferDescriptor>();
avcNalDataLength = 0;
avcNalDataChain.add(location);
avcNalDataLength += location.length;
reassembleAvcNal();
}
else {
int packetIndex = packet.getPacketIndex();
int packetsInFrame = packet.getTotalPackets();
// Check if this is the first packet for a frame
if (packetIndex == 0) {
// Setup state for the new frame
avcNalDataChain = new LinkedList<ByteBufferDescriptor>();
avcNalDataLength = 0;
}
// Check if this packet falls in the range of packets in frame
if (packetIndex >= packetsInFrame) {
// This isn't H264 frame data
return;
}
// Adjust the length to only contain valid data
location.length = packet.getPayloadLength();
// Add the payload data to the chain
if (avcNalDataChain != null) {
avcNalDataChain.add(location);
avcNalDataLength += location.length;
}
// Reassemble the NALs if this was the last packet for this frame
if (packetIndex + 1 == packetsInFrame) {
reassembleAvcNal();
}
}
}
public void addInputData(VideoPacket packet)
{
ByteBufferDescriptor location = packet.getNewPayloadDescriptor();
if (location.length == 968) {
if (packet.getPacketIndex() < packet.getTotalPackets()) {
location.length = packet.getPayloadLength();
}
else {
return;
}
}
while (location.length != 0) while (location.length != 0)
{ {
// Remember the start of the NAL data in this packet // Remember the start of the NAL data in this packet
@ -134,11 +107,11 @@ public class VideoDepacketizer {
if (NAL.isAvcFrameStart(cachedDesc)) if (NAL.isAvcFrameStart(cachedDesc))
{ {
// Reassemble any pending AVC NAL // Reassemble any pending AVC NAL
reassembleAvcNal(); reassembleAvcFrame(packet.getFrameIndex());
// Setup state for the new NAL // Setup state for the new NAL
avcNalDataChain = new LinkedList<ByteBufferDescriptor>(); avcFrameDataChain = new LinkedList<ByteBufferDescriptor>();
avcNalDataLength = 0; avcFrameDataLength = 0;
} }
// Skip the start sequence // Skip the start sequence
@ -151,7 +124,7 @@ public class VideoDepacketizer {
if (currentlyDecoding == DecodeUnit.TYPE_H264 && if (currentlyDecoding == DecodeUnit.TYPE_H264 &&
NAL.isPadding(cachedDesc)) { NAL.isPadding(cachedDesc)) {
// The decode unit is complete // The decode unit is complete
reassembleAvcNal(); reassembleAvcFrame(packet.getFrameIndex());
} }
// Not decoding AVC // Not decoding AVC
@ -187,39 +160,150 @@ public class VideoDepacketizer {
location.length--; location.length--;
} }
if (currentlyDecoding == DecodeUnit.TYPE_H264 && avcNalDataChain != null) if (currentlyDecoding == DecodeUnit.TYPE_H264 && avcFrameDataChain != null)
{ {
ByteBufferDescriptor data = new ByteBufferDescriptor(location.data, start, location.offset-start); ByteBufferDescriptor data = new ByteBufferDescriptor(location.data, start, location.offset-start);
// Add a buffer descriptor describing the NAL data in this packet // Add a buffer descriptor describing the NAL data in this packet
avcNalDataChain.add(data); avcFrameDataChain.add(data);
avcNalDataLength += location.offset-start; avcFrameDataLength += location.offset-start;
} }
} }
} }
public void addInputDataFast(VideoPacket packet, ByteBufferDescriptor location, boolean firstPacket)
{
if (firstPacket) {
// Setup state for the new frame
avcFrameDataChain = new LinkedList<ByteBufferDescriptor>();
avcFrameDataLength = 0;
}
// Add the payload data to the chain
avcFrameDataChain.add(location);
avcFrameDataLength += location.length;
}
public void addInputData(VideoPacket packet)
{
ByteBufferDescriptor location = packet.getNewPayloadDescriptor();
// Runt packets get decoded using the slow path
// These packets stand alone so there's no need to verify
// sequencing before submitting
if (location.length < 968) {
addInputDataSlow(packet, location);
return;
}
int frameIndex = packet.getFrameIndex();
int packetIndex = packet.getPacketIndex();
int packetsInFrame = packet.getTotalPackets();
// We can use FEC to correct single packet errors
// on single packet frames because we just get a
// duplicate of the original packet
if (packetsInFrame == 1 && packetIndex == 1 &&
nextPacketNumber == 0 && frameIndex == nextFrameNumber) {
LimeLog.info("Using FEC for error correction");
nextPacketNumber = 1;
}
// Discard the rest of the FEC data until we know how to use it
else if (packetIndex >= packetsInFrame) {
return;
}
// Check that this is the next frame
boolean firstPacket = (packet.getFlags() & VideoPacket.FLAG_SOF) != 0;
if (frameIndex > nextFrameNumber) {
// Nope, but we can still work with it if it's
// the start of the next frame
if (firstPacket) {
LimeLog.warning("Got start of frame "+frameIndex+
" when expecting packet "+nextPacketNumber+
" of frame "+nextFrameNumber);
nextFrameNumber = frameIndex;
nextPacketNumber = 0;
clearAvcFrameState();
// Tell the encoder when we're done decoding this frame
// that we lost some previous frames
waitingForNextSuccessfulFrame = true;
}
else {
LimeLog.warning("Got packet "+packetIndex+" of frame "+frameIndex+
" when expecting packet "+nextPacketNumber+
" of frame "+nextFrameNumber);
// We dropped the start of this frame too
waitingForNextSuccessfulFrame = true;
// Try to pickup on the next frame
nextFrameNumber = frameIndex + 1;
nextPacketNumber = 0;
clearAvcFrameState();
return;
}
}
else if (frameIndex < nextFrameNumber) {
LimeLog.info("Frame "+frameIndex+" is behind our current frame number "+nextFrameNumber);
// Discard the frame silently if it's behind our current sequence number
return;
}
// We know it's the right frame, now check the packet number
if (packetIndex != nextPacketNumber) {
LimeLog.warning("Frame "+frameIndex+": expected packet "+nextPacketNumber+" but got "+packetIndex);
// At this point, we're guaranteed that it's not FEC data that we lost
waitingForNextSuccessfulFrame = true;
// Skip this frame
nextFrameNumber++;
nextPacketNumber = 0;
clearAvcFrameState();
return;
}
nextPacketNumber++;
// Remove extra padding
location.length = packet.getPayloadLength();
if (firstPacket)
{
if (NAL.getSpecialSequenceDescriptor(location, cachedDesc) && NAL.isAvcFrameStart(cachedDesc)
&& cachedDesc.data[cachedDesc.offset+cachedDesc.length] == 0x67)
{
// SPS and PPS prefix is padded between NALs, so we must decode it with the slow path
clearAvcFrameState();
addInputDataSlow(packet, location);
return;
}
}
addInputDataFast(packet, location, firstPacket);
// We can't use the EOF flag here because real frames can be split across
// multiple "frames" when packetized to fit under the bandwidth ceiling
if (packetIndex + 1 >= packetsInFrame) {
nextFrameNumber++;
nextPacketNumber = 0;
}
if ((packet.getFlags() & VideoPacket.FLAG_EOF) != 0) {
reassembleAvcFrame(packet.getFrameIndex());
if (waitingForNextSuccessfulFrame) {
// This is the next successful frame after a loss event
controlListener.connectionDetectedFrameLoss(startFrameNumber, nextFrameNumber - 1);
waitingForNextSuccessfulFrame = false;
}
startFrameNumber = nextFrameNumber;
}
}
public void addInputData(RtpPacket packet) public void addInputData(RtpPacket packet)
{ {
short seq = packet.getSequenceNumber();
// Toss out the current NAL if we receive a packet that is
// out of sequence
if (lastSequenceNumber != 0 &&
(short)(lastSequenceNumber + 1) != seq)
{
LimeLog.warning("Received OOS video data (expected "+(lastSequenceNumber + 1)+", got "+seq+")");
// Reset the depacketizer state
clearAvcNalState();
// Request an IDR frame
controlListener.connectionNeedsResync();
}
lastSequenceNumber = seq;
// Pass the payload to the non-sequencing parser
ByteBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor(); ByteBufferDescriptor rtpPayload = packet.getNewPayloadDescriptor();
addInputData(new VideoPacket(rtpPayload)); addInputData(new VideoPacket(rtpPayload));
} }

View File

@ -12,6 +12,10 @@ public class VideoPacket {
private int packetIndex; private int packetIndex;
private int totalPackets; private int totalPackets;
private int payloadLength; private int payloadLength;
private int flags;
public static final int FLAG_EOF = 0x2;
public static final int FLAG_SOF = 0x4;
public VideoPacket(ByteBufferDescriptor rtpPayload) public VideoPacket(ByteBufferDescriptor rtpPayload)
{ {
@ -23,12 +27,15 @@ public class VideoPacket {
frameIndex = bb.getInt(); frameIndex = bb.getInt();
packetIndex = bb.getInt(); packetIndex = bb.getInt();
totalPackets = bb.getInt(); totalPackets = bb.getInt();
flags = bb.getInt();
bb.position(bb.position()+4);
payloadLength = bb.getInt(); payloadLength = bb.getInt();
} }
public int getFlags()
{
return flags;
}
public int getFrameIndex() public int getFrameIndex()
{ {
return frameIndex; return frameIndex;

View File

@ -32,6 +32,7 @@ public class VideoStream {
private LinkedList<Thread> threads = new LinkedList<Thread>(); private LinkedList<Thread> threads = new LinkedList<Thread>();
private NvConnectionListener listener; private NvConnectionListener listener;
private ConnectionStatusListener avConnListener;
private VideoDepacketizer depacketizer; private VideoDepacketizer depacketizer;
private StreamConfiguration streamConfig; private StreamConfiguration streamConfig;
@ -44,7 +45,7 @@ public class VideoStream {
{ {
this.host = host; this.host = host;
this.listener = listener; this.listener = listener;
this.depacketizer = new VideoDepacketizer(avConnListener); this.avConnListener = avConnListener;
this.streamConfig = streamConfig; this.streamConfig = streamConfig;
} }
@ -131,6 +132,13 @@ public class VideoStream {
if (decRend != null) { if (decRend != null) {
decRend.setup(streamConfig.getWidth(), streamConfig.getHeight(), decRend.setup(streamConfig.getWidth(), streamConfig.getHeight(),
60, renderTarget, drFlags); 60, renderTarget, drFlags);
if ((decRend.getCapabilities() & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) != 0) {
depacketizer = new VideoDepacketizer(decRend, avConnListener);
}
else {
depacketizer = new VideoDepacketizer(null, avConnListener);
}
} }
} }
@ -158,8 +166,10 @@ public class VideoStream {
// early packets // early packets
startReceiveThread(); startReceiveThread();
// Start decoding the data we're receiving // Start a decode thread if we're not doing direct submit
if ((decRend.getCapabilities() & VideoDecoderRenderer.CAPABILITY_DIRECT_SUBMIT) == 0) {
startDecoderThread(); startDecoderThread();
}
// Start the renderer // Start the renderer
decRend.start(); decRend.start();

View File

@ -0,0 +1,17 @@
package com.limelight.nvstream.control;
public class ByteConfigTuple extends ConfigTuple {
public static final short PAYLOAD_LENGTH = 1;
public byte payload;
public ByteConfigTuple(short packetType, byte payload) {
super(packetType, PAYLOAD_LENGTH);
this.payload = payload;
}
@Override
public byte[] payloadToWire() {
return new byte[] {payload};
}
}

View File

@ -2,103 +2,113 @@ package com.limelight.nvstream.control;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
import java.util.ArrayList;
import com.limelight.nvstream.StreamConfiguration; import com.limelight.nvstream.StreamConfiguration;
public class Config { public class Config {
public static final int[] UNKNOWN_CONFIG = public static final ConfigTuple[] CONFIG_720_60 =
{ {
70151, new ByteConfigTuple((short)0x1207, (byte)1), //iFrameOnDemand
68291329, new IntConfigTuple((short)0x120b, 7), //averageBitrate
1280, new IntConfigTuple((short)0x120c, 7), //peakBitrate
68291584, new IntConfigTuple((short)0x120d, 60), //gopLength
1280, new IntConfigTuple((short)0x120e, 100), //vbvMultiplier
68291840, new IntConfigTuple((short)0x120f, 5), //rateControlMode
15360, new IntConfigTuple((short)0x1210, 4), //slicesPerFrame
68292096, new IntConfigTuple((short)0x1202, 1024), //packetSize
25600, new ByteConfigTuple((short)0x1203, (byte)0), //recordServerStats
68292352, new ByteConfigTuple((short)0x1201, (byte)0), //serverCapture
2048, new ByteConfigTuple((short)0x1234, (byte)0), //serverNetworkCapture
68292608, new ByteConfigTuple((short)0x1248, (byte)0),
1024, new ByteConfigTuple((short)0x1208, (byte)1), //refPicInvalidation
68289024, new ByteConfigTuple((short)0x1209, (byte)0), //enableFrameRateCtrl
262144, new IntConfigTuple((short)0x1212, 3000), //pingBackIntervalMs
17957632, new IntConfigTuple((short)0x1238, 10000), //pingBackTimeoutMs
302055424, new ByteConfigTuple((short)0x1211, (byte)0), //enableSubframeEncoding
134217729, new ByteConfigTuple((short)0x1213, (byte)1), //videoQoSFecEnable
16777490, new IntConfigTuple((short)0x1214, 50), //videoQoSFecNumSrcPackets
70153, new IntConfigTuple((short)0x1215, 60), //videoQoSFecNumOutPackets
68293120, new IntConfigTuple((short)0x1216, 20), //videoQoSFecRepairPercent
768000, new IntConfigTuple((short)0x1217, 0), //videoQoSTsEnable
17961216, new IntConfigTuple((short)0x1218, 8), //videoQoSTsAverageBitrate
303235072, new IntConfigTuple((short)0x1219, 10), //videoQoSTsMaximumBitrate
335609857, new IntConfigTuple((short)0x121a, 311), //videoQoSBwFlags
838861842, new IntConfigTuple((short)0x121b, 10000), //videoQoSBwMaximumBitrate
352321536, new IntConfigTuple((short)0x121c, 2000), //videoQoSBwMinimumBitrate
1006634002, new IntConfigTuple((short)0x121d, 50), //videoQoSBwStatsTime
369098752, new IntConfigTuple((short)0x121e, 3000), //videoQoSBwZeroLossCount
335545362, new IntConfigTuple((short)0x121f, 2), //videoQoSBwLossThreshold
385875968, new IntConfigTuple((short)0x122a, 5000), //videoQoSBwOwdThreshold
1042, new IntConfigTuple((short)0x122b, 500), //videoQoSBwOwdReference
402653184, new IntConfigTuple((short)0x1220, 75), //videoQoSBwLossWaitTime
134218770, new IntConfigTuple((short)0x1221, 25), //videoQoSBwRateDropMultiplier
419430400, new IntConfigTuple((short)0x1222, 10), //videoQoSBwRateGainMultiplier
167773202, new IntConfigTuple((short)0x1223, 60), //videoQoSBwMaxFps
436207616, new IntConfigTuple((short)0x1224, 30), //videoQoSBwMinFps
855638290, new IntConfigTuple((short)0x1225, 3), //videoQoSBwFpsThreshold
266779, new IntConfigTuple((short)0x1226, 1000), //videoQoSBwJitterThreshold
7000, new IntConfigTuple((short)0x1227, 5000), //videoQoSBwJitterWaitTime
266780, new IntConfigTuple((short)0x1228, 5000), //videoQoSBwNoJitterWaitTime
2000, new IntConfigTuple((short)0x124e, 110),
266781, new IntConfigTuple((short)0x1237, 10), //videoQoSBwEarlyDetectionEnableL1Threshold
50, new IntConfigTuple((short)0x1236, 6), //videoQoSBwEarlyDetectionEnableL0Threshold
266782, new IntConfigTuple((short)0x1235, 4), //videoQoSBwEarlyDetectionDisableThreshold
3000, new IntConfigTuple((short)0x1242, 20000), //videoQoSBwEarlyDetectionWaitTime
266783, new IntConfigTuple((short)0x1244, 100),
2, new IntConfigTuple((short)0x1245, 1000),
266794, new IntConfigTuple((short)0x1246, 720),
5000, new IntConfigTuple((short)0x1247, 480),
266795, new IntConfigTuple((short)0x1229, 5000), //videoQosVideoQualityScoreUpdateTime
500, new ByteConfigTuple((short)0x122e, (byte)7), //videoQosTrafficType
266784, new IntConfigTuple((short)0x1231, 40), //videoQosBnNotifyUpBoundThreshold
75, new IntConfigTuple((short)0x1232, 25), //videoQosBnNotifyLowBoundThreshold
266785, new IntConfigTuple((short)0x1233, 3000), //videoQosBnNotifyWaitTime
25, new IntConfigTuple((short)0x122c, 3), //videoQosInvalidateThreshold
266786, new IntConfigTuple((short)0x122d, 10), //videoQosInvalidateSkipPercentage
10, /*new IntConfigTuple((short)0x123b, 12),
266787, new IntConfigTuple((short)0x123c, 3),
60, new IntConfigTuple((short)0x1249, 0),
266788, new IntConfigTuple((short)0x124a, 4000),
30, new IntConfigTuple((short)0x124b, 5000),
266789, new IntConfigTuple((short)0x124c, 6000),
3, new IntConfigTuple((short)0x124d, 1000),*/
266790, new IntConfigTuple((short)0x122f, 0), //riSecurityProtocol
1000, new ShortConfigTuple((short)0x1230, (short)0), //riSecInfoUsePredefinedCert
266791, new IntConfigTuple((short)0x1239, 0), //videoFrameDropIntervalNumber
5000, new IntConfigTuple((short)0x123a, 0), //videoFrameDropContinualNumber
266792, new IntConfigTuple((short)0x123d, 96000), //audioQosBitRate
5000, new IntConfigTuple((short)0x123e, 5), //audioQosPacketDuration
266793, new IntConfigTuple((short)0x123f, 1), //audioQosEnablePacketLossPercentage
5000, new IntConfigTuple((short)0x1243, 100) //audioQosPacketLossPercentageUpdateInterval
70190,
68301063,
10240,
68301312,
6400,
68301568,
768000,
68299776,
768,
68300032,
2560,
68300544,
0,
34746368,
(int)0xFE000000
}; };
public static final int CONFIG_SIZE = ((8 + UNKNOWN_CONFIG.length) * 4) + 3; public static final ConfigTuple[] CONFIG_1080_30_DIFF =
{
new IntConfigTuple((short)0x120b, 10), //averageBitrate
new IntConfigTuple((short)0x120c, 10), //peakBitrate
new IntConfigTuple((short)0x121c, 4000), //videoQoSBwMinimumBitrate
new IntConfigTuple((short)0x1245, 3000),
new IntConfigTuple((short)0x1246, 1280),
new IntConfigTuple((short)0x1247, 720),
/*new IntConfigTuple((short)0x124a, 5000),
new IntConfigTuple((short)0x124c, 7000),*/
};
public static final ConfigTuple[] CONFIG_1080_60_DIFF =
{
new IntConfigTuple((short)0x120b, 30), //averageBitrate
new IntConfigTuple((short)0x120c, 30), //peakBitrate
new IntConfigTuple((short)0x120f, 4), //rateControlMode
new IntConfigTuple((short)0x121b, 30000), //videoQoSBwMaximumBitrate
new IntConfigTuple((short)0x121c, 25000), //videoQoSBwMinimumBitrate
new IntConfigTuple((short)0x1245, 3000),
new IntConfigTuple((short)0x1246, 1280),
new IntConfigTuple((short)0x1247, 720),
/*new IntConfigTuple((short)0x124a, 5000),
new IntConfigTuple((short)0x124c, 7000),*/
};
private StreamConfiguration streamConfig; private StreamConfiguration streamConfig;
@ -106,37 +116,80 @@ public class Config {
this.streamConfig = streamConfig; this.streamConfig = streamConfig;
} }
private void updateSetWithConfig(ArrayList<ConfigTuple> set, ConfigTuple[] config)
{
for (ConfigTuple tuple : config)
{
int i;
for (i = 0; i < set.size(); i++) {
ConfigTuple existingTuple = set.get(i);
if (existingTuple.packetType == tuple.packetType) {
set.remove(i);
set.add(i, tuple);
break;
}
}
if (i == set.size()) {
set.add(tuple);
}
}
}
private int getConfigOnWireSize(ArrayList<ConfigTuple> tupleSet)
{
int size = 0;
for (ConfigTuple t : tupleSet)
{
size += ConfigTuple.HEADER_LENGTH + t.payloadLength;
}
return size;
}
private ArrayList<ConfigTuple> generateTupleSet() {
ArrayList<ConfigTuple> tupleSet = new ArrayList<ConfigTuple>();
tupleSet.add(new IntConfigTuple((short)0x1204, streamConfig.getWidth()));
tupleSet.add(new IntConfigTuple((short)0x1205, streamConfig.getHeight()));
tupleSet.add(new IntConfigTuple((short)0x1206, 1)); //videoTransferProtocol
tupleSet.add(new IntConfigTuple((short)0x120A, streamConfig.getRefreshRate()));
// Start with the initial config for 720p60
updateSetWithConfig(tupleSet, CONFIG_720_60);
if (streamConfig.getWidth() >= 1920 &&
streamConfig.getHeight() >= 1080)
{
if (streamConfig.getRefreshRate() >= 60)
{
// Update the initial set with the changed 1080p60 options
updateSetWithConfig(tupleSet, CONFIG_1080_60_DIFF);
}
else
{
// Update the initial set with the changed 1080p30 options
updateSetWithConfig(tupleSet, CONFIG_1080_30_DIFF);
}
}
return tupleSet;
}
public byte[] toWire() { public byte[] toWire() {
ByteBuffer bb = ByteBuffer.allocate(CONFIG_SIZE).order(ByteOrder.LITTLE_ENDIAN); ArrayList<ConfigTuple> tupleSet = generateTupleSet();
ByteBuffer bb = ByteBuffer.allocate(getConfigOnWireSize(tupleSet) + 4).order(ByteOrder.LITTLE_ENDIAN);
// Width for (ConfigTuple t : tupleSet)
bb.putShort((short) 0x1204); {
bb.putShort((short) 0x0004); bb.put(t.toWire());
bb.putInt(streamConfig.getWidth());
// Height
bb.putShort((short) 0x1205);
bb.putShort((short) 0x0004);
bb.putInt(streamConfig.getHeight());
// Unknown
bb.putShort((short) 0x1206);
bb.putShort((short) 0x0004);
bb.putInt(1);
// Refresh rate
bb.putShort((short) 0x120A);
bb.putShort((short) 0x0004);
bb.putInt(streamConfig.getRefreshRate());
// The rest are hardcoded
for (int i : UNKNOWN_CONFIG) {
bb.putInt(i);
} }
// Config tail // Config tail
bb.putShort((short) 0x0013); bb.putShort((short) 0x13fe);
bb.put((byte) 0x00); bb.putShort((short) 0x00);
return bb.array(); return bb.array();
} }

View File

@ -0,0 +1,53 @@
package com.limelight.nvstream.control;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public abstract class ConfigTuple {
public short packetType;
public short payloadLength;
public static final short HEADER_LENGTH = 4;
public ConfigTuple(short packetType, short payloadLength)
{
this.packetType = packetType;
this.payloadLength = payloadLength;
}
public abstract byte[] payloadToWire();
public byte[] toWire()
{
byte[] payload = payloadToWire();
ByteBuffer bb = ByteBuffer.allocate(HEADER_LENGTH + (payload != null ? payload.length : 0))
.order(ByteOrder.LITTLE_ENDIAN);
bb.putShort(packetType);
bb.putShort(payloadLength);
if (payload != null) {
bb.put(payload);
}
return bb.array();
}
@Override
public int hashCode()
{
return packetType;
}
@Override
public boolean equals(Object o)
{
// We only compare the packet types on purpose
if (o instanceof ConfigTuple) {
return ((ConfigTuple)o).packetType == packetType;
}
else {
return false;
}
}
}

View File

@ -8,6 +8,7 @@ import java.net.InetSocketAddress;
import java.net.Socket; import java.net.Socket;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
import java.util.concurrent.LinkedBlockingQueue;
import com.limelight.LimeLog; import com.limelight.LimeLog;
import com.limelight.nvstream.NvConnectionListener; import com.limelight.nvstream.NvConnectionListener;
@ -45,6 +46,15 @@ public class ControlStream implements ConnectionStatusListener {
private InetAddress host; private InetAddress host;
private Config config; private Config config;
public static final int LOSS_PERIOD_MS = 15000;
public static final int MAX_LOSS_COUNT_IN_PERIOD = 3;
public static final int MAX_SLOW_SINK_COUNT = 3;
public static final int MESSAGE_DELAY_FACTOR = 5;
private long lossTimestamp;
private int lossCount;
private int slowSinkCount;
private Socket s; private Socket s;
private InputStream in; private InputStream in;
private OutputStream out; private OutputStream out;
@ -52,7 +62,7 @@ public class ControlStream implements ConnectionStatusListener {
private Thread heartbeatThread; private Thread heartbeatThread;
private Thread jitterThread; private Thread jitterThread;
private Thread resyncThread; private Thread resyncThread;
private Object resyncNeeded = new Object(); private LinkedBlockingQueue<int[]> invalidReferenceFrameTuples = new LinkedBlockingQueue<int[]>();
private boolean aborting = false; private boolean aborting = false;
public ControlStream(InetAddress host, NvConnectionListener listener, StreamConfiguration streamConfig) public ControlStream(InetAddress host, NvConnectionListener listener, StreamConfiguration streamConfig)
@ -132,12 +142,6 @@ public class ControlStream implements ConnectionStatusListener {
} }
} }
public void requestResync() throws IOException
{
LimeLog.info("CTL: Requesting IDR frame");
sendResync();
}
public void start() throws IOException public void start() throws IOException
{ {
// Use a finite timeout during the handshake process // Use a finite timeout during the handshake process
@ -180,18 +184,39 @@ public class ControlStream implements ConnectionStatusListener {
public void run() { public void run() {
while (!isInterrupted()) while (!isInterrupted())
{ {
int[] tuple;
// Wait for a tuple
try { try {
// Wait for notification of a resync needed tuple = invalidReferenceFrameTuples.take();
synchronized (resyncNeeded) {
resyncNeeded.wait();
}
} catch (InterruptedException e) { } catch (InterruptedException e) {
listener.connectionTerminated(e); listener.connectionTerminated(e);
return; return;
} }
// Aggregate all lost frames into one range
int[] lastTuple = null;
for (;;) {
int[] nextTuple = lastTuple = invalidReferenceFrameTuples.poll();
if (nextTuple == null) {
break;
}
lastTuple = nextTuple;
}
// The server expects this to be the firstLostFrame + 1
tuple[0]++;
// Update the end of the range to the latest tuple
if (lastTuple != null) {
tuple[1] = lastTuple[1];
}
try { try {
requestResync(); LimeLog.warning("Invalidating reference frames from "+tuple[0]+" to "+tuple[1]);
ControlStream.this.sendResync(tuple[0], tuple[1]);
LimeLog.warning("Frames invalidated");
} catch (IOException e) { } catch (IOException e) {
listener.connectionTerminated(e); listener.connectionTerminated(e);
return; return;
@ -235,10 +260,12 @@ public class ControlStream implements ConnectionStatusListener {
return sendAndGetReply(new NvCtlPacket(PTYPE_1405, PPAYLEN_1405)); return sendAndGetReply(new NvCtlPacket(PTYPE_1405, PPAYLEN_1405));
} }
private void sendResync() throws IOException private void sendResync(int firstLostFrame, int nextSuccessfulFrame) throws IOException
{ {
ByteBuffer conf = ByteBuffer.wrap(new byte[PPAYLEN_RESYNC]).order(ByteOrder.LITTLE_ENDIAN); ByteBuffer conf = ByteBuffer.wrap(new byte[PPAYLEN_RESYNC]).order(ByteOrder.LITTLE_ENDIAN);
//conf.putLong(firstLostFrame);
//conf.putLong(nextSuccessfulFrame);
conf.putLong(0); conf.putLong(0);
conf.putLong(0xFFFFF); conf.putLong(0xFFFFF);
@ -405,10 +432,32 @@ public class ControlStream implements ConnectionStatusListener {
abort(); abort();
} }
public void connectionNeedsResync() { private void resyncConnection(int firstLostFrame, int nextSuccessfulFrame) {
synchronized (resyncNeeded) { invalidReferenceFrameTuples.add(new int[]{firstLostFrame, nextSuccessfulFrame});
// Wake up the resync thread
resyncNeeded.notify();
} }
public void connectionDetectedFrameLoss(int firstLostFrame, int nextSuccessfulFrame) {
if (System.currentTimeMillis() > LOSS_PERIOD_MS + lossTimestamp) {
lossCount++;
lossTimestamp = System.currentTimeMillis();
}
else {
if (++lossCount == MAX_LOSS_COUNT_IN_PERIOD) {
listener.displayTransientMessage("Detected excessive A/V data loss. Try improving your network connection or lowering stream settings.");
lossCount = -MAX_LOSS_COUNT_IN_PERIOD * MESSAGE_DELAY_FACTOR;
lossTimestamp = 0;
}
}
resyncConnection(firstLostFrame, nextSuccessfulFrame);
}
public void connectionSinkTooSlow(int firstLostFrame, int nextSuccessfulFrame) {
if (++slowSinkCount == MAX_SLOW_SINK_COUNT) {
listener.displayTransientMessage("Your device is processing the A/V data too slowly. Try lowering stream settings.");
slowSinkCount = -MAX_SLOW_SINK_COUNT * MESSAGE_DELAY_FACTOR;
}
resyncConnection(firstLostFrame, nextSuccessfulFrame);
} }
} }

View File

@ -0,0 +1,23 @@
package com.limelight.nvstream.control;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * A ConfigTuple whose payload is a single 32-bit integer,
 * encoded as 4 little-endian bytes on the wire.
 */
public class IntConfigTuple extends ConfigTuple {
	public static final short PAYLOAD_LENGTH = 4;

	public int payload;

	public IntConfigTuple(short packetType, int payload) {
		super(packetType, PAYLOAD_LENGTH);
		this.payload = payload;
	}

	@Override
	public byte[] payloadToWire() {
		// 4-byte little-endian encoding of the int payload
		ByteBuffer buffer = ByteBuffer.allocate(PAYLOAD_LENGTH)
				.order(ByteOrder.LITTLE_ENDIAN);
		buffer.putInt(payload);
		return buffer.array();
	}
}

View File

@ -0,0 +1,23 @@
package com.limelight.nvstream.control;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * A ConfigTuple whose payload is a single 16-bit value,
 * encoded as 2 little-endian bytes on the wire.
 */
public class ShortConfigTuple extends ConfigTuple {
	public static final short PAYLOAD_LENGTH = 2;

	public short payload;

	public ShortConfigTuple(short packetType, short payload) {
		super(packetType, PAYLOAD_LENGTH);
		this.payload = payload;
	}

	@Override
	public byte[] payloadToWire() {
		// 2-byte little-endian encoding of the short payload
		ByteBuffer buffer = ByteBuffer.allocate(PAYLOAD_LENGTH)
				.order(ByteOrder.LITTLE_ENDIAN);
		buffer.putShort(payload);
		return buffer.array();
	}
}