Allow applications to reliably calculate decode unit queue delay

Cameron Gutman
2020-12-31 16:08:00 -06:00
parent 3aa2463856
commit fd950b6452
2 changed files with 10 additions and 3 deletions


@@ -136,11 +136,17 @@ typedef struct _DECODE_UNIT {
 // Frame type
 int frameType;
-// Receive time of first buffer. This value uses an implementation-defined epoch.
-// To compute actual latency values, use LiGetMillis() to get a timestamp that
-// shares the same epoch as this value.
+// Receive time of first buffer. This value uses an implementation-defined epoch,
+// but the same epoch as enqueueTimeMs and LiGetMillis().
 uint64_t receiveTimeMs;
+// Time the frame was fully assembled and queued for the video decoder to process.
+// This is also approximately the same time as the final packet was received, so
+// enqueueTimeMs - receiveTimeMs is the time taken to receive the frame. At the
+// time the decode unit is passed to submitDecodeUnit(), the total queue delay
+// can be calculated by LiGetMillis() - enqueueTimeMs.
+uint64_t enqueueTimeMs;
 // Presentation time in milliseconds with the epoch at the first captured frame.
 // This can be used to aid frame pacing or to drop old frames that were queued too
 // long prior to display.
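
For context, a minimal application-side sketch of how the new field might be used. The callback name, include path, and logging are illustrative assumptions; only the DECODE_UNIT fields and LiGetMillis() come from the diff above, and the PDECODE_UNIT argument and DR_OK return follow the library's usual decoder callback convention.

#include <stdio.h>
#include <Limelight.h>

// Hypothetical decoder callback registered by the application.
static int drSubmitDecodeUnit(PDECODE_UNIT decodeUnit) {
    // Time spent receiving and reassembling the frame from network packets
    uint64_t receiveDurationMs = decodeUnit->enqueueTimeMs - decodeUnit->receiveTimeMs;

    // Time the fully assembled frame waited in the decode queue before reaching us
    uint64_t queueDelayMs = LiGetMillis() - decodeUnit->enqueueTimeMs;

    printf("Frame %d: receive %llu ms, queue delay %llu ms\n",
           decodeUnit->frameNumber,
           (unsigned long long)receiveDurationMs,
           (unsigned long long)queueDelayMs);

    // ... hand the frame's buffer chain to the hardware decoder here ...

    return DR_OK;
}

Note that LiGetMillis() - enqueueTimeMs isolates pure queueing delay, whereas LiGetMillis() - receiveTimeMs would also include the time spent receiving and reassembling the frame.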


@@ -253,6 +253,7 @@ static void reassembleFrame(int frameNumber) {
 qdu->decodeUnit.frameNumber = frameNumber;
 qdu->decodeUnit.receiveTimeMs = firstPacketReceiveTime;
 qdu->decodeUnit.presentationTimeMs = firstPacketPresentationTime;
+qdu->decodeUnit.enqueueTimeMs = LiGetMillis();
 // IDR frames will have leading CSD buffers
 if (nalChainHead->bufferType != BUFFER_TYPE_PICDATA) {