Mirror of https://github.com/moonlight-stream/moonlight-common-c.git
Change presentationTimeMs to presentationTimeUs: retain more resolution when dealing with RTP video timestamps from Sunshine. Include raw rtpTimestamp value for use with integer time APIs.
Committed by: Cameron Gutman
Parent: fdd026518c
Commit: e356b2cfde
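Before the diff, a quick note on what the unit change buys: the old code divided the 90 kHz RTP timestamp straight down to milliseconds, discarding everything below 1 ms, while converting to microseconds first keeps the roughly 11.1 µs granularity of the RTP clock. A minimal standalone comparison of the two conversions (the sample timestamp value is made up for illustration; PTS_DIVISOR matches the existing define in the diff below):

#include <stdint.h>
#include <stdio.h>

#define PTS_DIVISOR 90  // RTP video timestamps tick at 90 kHz

int main(void) {
    uint32_t rtpTimestamp = 123457;  // arbitrary 90 kHz tick count from a packet

    // Old conversion: millisecond resolution, truncating up to 999 us per frame
    uint64_t presentationTimeMs = rtpTimestamp / PTS_DIVISOR;

    // New conversion: microsecond resolution, as introduced by this commit
    uint64_t presentationTimeUs = ((uint64_t)rtpTimestamp * 1000) / PTS_DIVISOR;

    printf("ms: %llu, us: %llu\n",
           (unsigned long long)presentationTimeMs,
           (unsigned long long)presentationTimeUs);
    // Prints "ms: 1371, us: 1371744" -- the millisecond value dropped 744 us of timing
    return 0;
}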
@@ -164,10 +164,15 @@ typedef struct _DECODE_UNIT {
     // can be calculated. This value is in microseconds.
     uint64_t enqueueTimeUs;
 
-    // Presentation time in milliseconds with the epoch at the first captured frame.
+    // Presentation time in microseconds with the epoch at the first captured frame.
     // This can be used to aid frame pacing or to drop old frames that were queued too
     // long prior to display.
-    uint64_t presentationTimeMs;
+    uint64_t presentationTimeUs;
+
+    // Original RTP timestamp in 90kHz units. Useful when using APIs that deal with integer
+    // time such as Apple's CMTime. To exactly recover the RTP timestamp, use something like
+    // CMTimeMake((int64_t)du->rtpTimestamp, 90000);
+    uint32_t rtpTimestamp;
 
     // Length of the entire buffer chain in bytes
     int fullLength;
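The new rtpTimestamp field exists because the microsecond conversion is not exactly invertible: integer division by 90 loses fractional ticks, so clients feeding integer-time APIs (the header comment above suggests CMTimeMake((int64_t)du->rtpTimestamp, 90000) on Apple platforms) should use the raw value. A small standalone check illustrating the round-trip loss, using plain C and made-up values:

#include <assert.h>
#include <stdint.h>

int main(void) {
    uint32_t rtpTimestamp = 90001;  // one tick past 1 second on the 90 kHz clock

    // Microsecond PTS as computed by the library after this change
    uint64_t presentationTimeUs = ((uint64_t)rtpTimestamp * 1000) / 90;  // 1000011

    // Converting back rounds down to 90000, so the original tick count is gone
    uint32_t roundTripped = (uint32_t)((presentationTimeUs * 90) / 1000);
    assert(roundTripped == 90000 && roundTripped != rtpTimestamp);

    // Integer-time APIs should therefore consume the raw field directly,
    // e.g. CMTimeMake((int64_t)rtpTimestamp, 90000) as the header comment suggests.
    return 0;
}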
@@ -11,7 +11,7 @@
 
 // Don't try speculative RFI for 5 minutes after seeing
 // an out of order packet or incorrect prediction
-#define SPECULATIVE_RFI_COOLDOWN_PERIOD_MS 300000
+#define SPECULATIVE_RFI_COOLDOWN_PERIOD_US 300000000
 
 // RTP packets use a 90 KHz presentation timestamp clock
 #define PTS_DIVISOR 90
@@ -154,21 +154,22 @@ static bool queuePacket(PRTP_VIDEO_QUEUE queue, PRTPV_QUEUE_ENTRY newEntry, PRTP
     newEntry->isParity = isParity;
     newEntry->prev = NULL;
     newEntry->next = NULL;
-    newEntry->presentationTimeMs = packet->timestamp / PTS_DIVISOR;
+    newEntry->presentationTimeUs = ((uint64_t)packet->timestamp * 1000) / PTS_DIVISOR;
+    newEntry->rtpTimestamp = packet->timestamp;
 
     // FEC recovery packets are synthesized by us, so don't use them to determine OOS data
     if (!isFecRecovery) {
         if (outOfSequence) {
             // This packet was received after a higher sequence number packet, so note that we
             // received an out of order packet to disable our speculative RFI recovery logic.
-            queue->lastOosFramePresentationTimestamp = newEntry->presentationTimeMs;
+            queue->lastOosFramePresentationTimestamp = newEntry->presentationTimeUs;
             if (!queue->receivedOosData) {
                 Limelog("Leaving speculative RFI mode after OOS video data at frame %u\n",
                         queue->currentFrameNumber);
                 queue->receivedOosData = true;
             }
         }
-        else if (queue->receivedOosData && newEntry->presentationTimeMs > queue->lastOosFramePresentationTimestamp + SPECULATIVE_RFI_COOLDOWN_PERIOD_MS) {
+        else if (queue->receivedOosData && newEntry->presentationTimeUs > queue->lastOosFramePresentationTimestamp + SPECULATIVE_RFI_COOLDOWN_PERIOD_US) {
             Limelog("Entering speculative RFI mode after sequenced video data at frame %u\n",
                     queue->currentFrameNumber);
             queue->receivedOosData = false;
@@ -234,7 +235,7 @@ static int reconstructFrame(PRTP_VIDEO_QUEUE queue) {
     if (queue->reportedLostFrame && !queue->receivedOosData) {
         // If it turns out that we lied to the host, stop further speculative RFI requests for a while.
         queue->receivedOosData = true;
-        queue->lastOosFramePresentationTimestamp = queue->pendingFecBlockList.head->presentationTimeMs;
+        queue->lastOosFramePresentationTimestamp = queue->pendingFecBlockList.head->presentationTimeUs;
         Limelog("Leaving speculative RFI mode due to incorrect loss prediction of frame %u\n", queue->currentFrameNumber);
     }
@@ -7,7 +7,8 @@ typedef struct _RTPV_QUEUE_ENTRY {
     struct _RTPV_QUEUE_ENTRY* prev;
     PRTP_PACKET packet;
     uint64_t receiveTimeUs;
-    uint64_t presentationTimeMs;
+    uint64_t presentationTimeUs;
+    uint32_t rtpTimestamp;
     int length;
     bool isParity;
 } RTPV_QUEUE_ENTRY, *PRTPV_QUEUE_ENTRY;
@@ -21,6 +21,7 @@ static uint64_t syntheticPtsBaseUs;
 static uint16_t frameHostProcessingLatency;
 static uint64_t firstPacketReceiveTimeUs;
 static uint64_t firstPacketPresentationTime;
+static uint32_t firstPacketRtpTimestamp;
 static bool dropStatePending;
 static bool idrFrameProcessed;
@@ -72,6 +73,7 @@ void initializeVideoDepacketizer(int pktSize) {
     frameHostProcessingLatency = 0;
     firstPacketReceiveTimeUs = 0;
     firstPacketPresentationTime = 0;
+    firstPacketRtpTimestamp = 0;
     lastPacketPayloadLength = 0;
     dropStatePending = false;
     idrFrameProcessed = false;
@@ -484,7 +486,8 @@ static void reassembleFrame(int frameNumber) {
     qdu->decodeUnit.frameNumber = frameNumber;
     qdu->decodeUnit.frameHostProcessingLatency = frameHostProcessingLatency;
     qdu->decodeUnit.receiveTimeUs = firstPacketReceiveTimeUs;
-    qdu->decodeUnit.presentationTimeMs = firstPacketPresentationTime;
+    qdu->decodeUnit.presentationTimeUs = firstPacketPresentationTime;
+    qdu->decodeUnit.rtpTimestamp = firstPacketRtpTimestamp;
     qdu->decodeUnit.enqueueTimeUs = PltGetMicroseconds();
 
     // These might be wrong for a few frames during a transition between SDR and HDR,
@@ -502,7 +505,7 @@ static void reassembleFrame(int frameNumber) {
     else {
         qdu->decodeUnit.frameType = FRAME_TYPE_PFRAME;
     }
 
     nalChainHead = nalChainTail = NULL;
     nalChainDataLength = 0;
@@ -740,7 +743,7 @@ static bool isFirstPacket(uint8_t flags, uint8_t fecBlockNumber) {
 // Process an RTP Payload
 // The caller will free *existingEntry unless we NULL it
 static void processRtpPayload(PNV_VIDEO_PACKET videoPacket, int length,
-                              uint64_t receiveTimeUs, uint64_t presentationTimeMs,
+                              uint64_t receiveTimeUs, uint64_t presentationTimeUs, uint32_t rtpTimestamp,
                               PLENTRY_INTERNAL* existingEntry) {
     BUFFER_DESC currentPos;
     uint32_t frameIndex;
@@ -831,12 +834,14 @@ static void processRtpPayload(PNV_VIDEO_PACKET videoPacket, int length,
             syntheticPtsBaseUs = receiveTimeUs;
         }
 
-        if (!presentationTimeMs && frameIndex > 0) {
-            firstPacketPresentationTime = (receiveTimeUs - syntheticPtsBaseUs) / 1000;
+        if (!presentationTimeUs && frameIndex > 0) {
+            firstPacketPresentationTime = receiveTimeUs - syntheticPtsBaseUs;
         }
         else {
-            firstPacketPresentationTime = presentationTimeMs;
+            firstPacketPresentationTime = presentationTimeUs;
         }
+
+        firstPacketRtpTimestamp = rtpTimestamp;
     }
 
     lastPacketInStream = streamPacketIndex;
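The hunk above also keeps the synthetic PTS path in the microsecond domain: when the host supplies no PTS, the presentation time falls back to the receive-time delta from the first frame, and the old division by 1000 (microseconds down to milliseconds) disappears. A sketch of that fallback as a standalone helper; the names mirror the depacketizer's statics, but the function itself is illustrative, not the library's API:

#include <stdint.h>

static uint64_t syntheticPtsBaseUs;  // receive time of the first frame ever seen

static uint64_t derivePresentationTimeUs(uint64_t hostPtsUs, uint64_t receiveTimeUs,
                                         uint32_t frameIndex) {
    if (!syntheticPtsBaseUs) {
        // First packet ever: anchor the synthetic clock at its receive time
        syntheticPtsBaseUs = receiveTimeUs;
    }

    if (!hostPtsUs && frameIndex > 0) {
        // Host provided no PTS: synthesize one from receive time, now in microseconds
        // (the previous code divided by 1000 here to produce milliseconds)
        return receiveTimeUs - syntheticPtsBaseUs;
    }

    // Host provided a PTS derived from the RTP timestamp; pass it through unchanged
    return hostPtsUs;
}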
@@ -1174,7 +1179,8 @@ void queueRtpPacket(PRTPV_QUEUE_ENTRY queueEntryPtr) {
     processRtpPayload((PNV_VIDEO_PACKET)(((char*)queueEntry.packet) + dataOffset),
                       queueEntry.length - dataOffset,
                       queueEntry.receiveTimeUs,
-                      queueEntry.presentationTimeMs,
+                      queueEntry.presentationTimeUs,
+                      queueEntry.rtpTimestamp,
                       &existingEntry);
 
     if (existingEntry != NULL) {