Convert all time-based stats to microseconds, improving accuracy for very fast events. Add average and peak bitrate stats.

This commit is contained in:
Andy Grundman
2024-09-13 13:56:10 -04:00
committed by Cameron Gutman
parent 1160f40655
commit 66b96e75af
11 changed files with 278 additions and 52 deletions

View File

@@ -227,6 +227,7 @@ FFmpegVideoDecoder::FFmpegVideoDecoder(bool testOnly)
m_FrontendRenderer(nullptr),
m_ConsecutiveFailedDecodes(0),
m_Pacer(nullptr),
m_BwTracker(10, 250),
m_FramesIn(0),
m_FramesOut(0),
m_LastFrameNumber(0),
@@ -520,6 +521,8 @@ bool FFmpegVideoDecoder::completeInitialization(const AVCodec* decoder, enum AVP
m_VideoDecoderCtx->width = params->width;
m_VideoDecoderCtx->height = params->height;
m_VideoDecoderCtx->get_format = ffGetFormat;
m_VideoDecoderCtx->pkt_timebase.num = 1;
m_VideoDecoderCtx->pkt_timebase.den = 90000;
// For non-hwaccel decoders, set the pix_fmt to hint to the decoder which
// format should be used. This is necessary for certain decoders like the
@@ -713,10 +716,10 @@ void FFmpegVideoDecoder::addVideoStats(VIDEO_STATS& src, VIDEO_STATS& dst)
dst.totalFrames += src.totalFrames;
dst.networkDroppedFrames += src.networkDroppedFrames;
dst.pacerDroppedFrames += src.pacerDroppedFrames;
dst.totalReassemblyTime += src.totalReassemblyTime;
dst.totalDecodeTime += src.totalDecodeTime;
dst.totalPacerTime += src.totalPacerTime;
dst.totalRenderTime += src.totalRenderTime;
dst.totalReassemblyTimeUs += src.totalReassemblyTimeUs;
dst.totalDecodeTimeUs += src.totalDecodeTimeUs;
dst.totalPacerTimeUs += src.totalPacerTimeUs;
dst.totalRenderTimeUs += src.totalRenderTimeUs;
if (dst.minHostProcessingLatency == 0) {
dst.minHostProcessingLatency = src.minHostProcessingLatency;
@@ -738,20 +741,19 @@ void FFmpegVideoDecoder::addVideoStats(VIDEO_STATS& src, VIDEO_STATS& dst)
SDL_assert(dst.lastRtt > 0);
}
Uint32 now = SDL_GetTicks();
// Initialize the measurement start point if this is the first video stat window
if (!dst.measurementStartTimestamp) {
dst.measurementStartTimestamp = src.measurementStartTimestamp;
if (!dst.measurementStartUs) {
dst.measurementStartUs = src.measurementStartUs;
}
// The following code assumes the global measurement window was started first
SDL_assert(dst.measurementStartTimestamp <= src.measurementStartTimestamp);
SDL_assert(dst.measurementStartUs <= src.measurementStartUs);
dst.totalFps = (float)dst.totalFrames / ((float)(now - dst.measurementStartTimestamp) / 1000);
dst.receivedFps = (float)dst.receivedFrames / ((float)(now - dst.measurementStartTimestamp) / 1000);
dst.decodedFps = (float)dst.decodedFrames / ((float)(now - dst.measurementStartTimestamp) / 1000);
dst.renderedFps = (float)dst.renderedFrames / ((float)(now - dst.measurementStartTimestamp) / 1000);
double timeDiffSecs = (double)(LiGetMicroseconds() - dst.measurementStartUs) / 1000000.0;
dst.totalFps = (double)dst.totalFrames / timeDiffSecs;
dst.receivedFps = (double)dst.receivedFrames / timeDiffSecs;
dst.decodedFps = (double)dst.decodedFrames / timeDiffSecs;
dst.renderedFps = (double)dst.renderedFrames / timeDiffSecs;
}
void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, int length)
@@ -833,13 +835,20 @@ void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, i
if (stats.receivedFps > 0) {
if (m_VideoDecoderCtx != nullptr) {
double avgVideoMbps = m_BwTracker.GetAverageMbps();
double peakVideoMbps = m_BwTracker.GetPeakMbps();
ret = snprintf(&output[offset],
length - offset,
"Video stream: %dx%d %.2f FPS (Codec: %s)\n",
"Video stream: %dx%d %.2f FPS (Codec: %s)\n"
"Bitrate: %.1f Mbps, Peak (%us): %.1f\n",
m_VideoDecoderCtx->width,
m_VideoDecoderCtx->height,
stats.totalFps,
codecString);
codecString,
avgVideoMbps,
m_BwTracker.GetWindowSeconds(),
peakVideoMbps);
if (ret < 0 || ret >= length - offset) {
SDL_assert(false);
return;
@@ -900,9 +909,9 @@ void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, i
(float)stats.networkDroppedFrames / stats.totalFrames * 100,
(float)stats.pacerDroppedFrames / stats.decodedFrames * 100,
rttString,
(float)stats.totalDecodeTime / stats.decodedFrames,
(float)stats.totalPacerTime / stats.renderedFrames,
(float)stats.totalRenderTime / stats.renderedFrames);
(double)(stats.totalDecodeTimeUs / 1000.0) / stats.decodedFrames,
(double)(stats.totalPacerTimeUs / 1000.0) / stats.renderedFrames,
(double)(stats.totalRenderTimeUs / 1000.0) / stats.renderedFrames);
if (ret < 0 || ret >= length - offset) {
SDL_assert(false);
return;
@@ -919,10 +928,8 @@ void FFmpegVideoDecoder::logVideoStats(VIDEO_STATS& stats, const char* title)
stringifyVideoStats(stats, videoStatsStr, sizeof(videoStatsStr));
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"%s", title);
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"----------------------------------------------------------\n%s",
videoStatsStr);
"\n%s\n------------------\n%s",
title, videoStatsStr);
}
}
@@ -1744,7 +1751,7 @@ void FFmpegVideoDecoder::decoderThreadProc()
av_log_set_level(AV_LOG_INFO);
// Capture a frame timestamp to measure pacing delay
frame->pkt_dts = SDL_GetTicks();
frame->pkt_dts = LiGetMicroseconds();
if (!m_FrameInfoQueue.isEmpty()) {
// Data buffers in the DU are not valid here!
@@ -1753,10 +1760,10 @@ void FFmpegVideoDecoder::decoderThreadProc()
// Count time in avcodec_send_packet() and avcodec_receive_frame()
// as time spent decoding. Also count time spent in the decode unit
// queue because that's directly caused by decoder latency.
m_ActiveWndVideoStats.totalDecodeTime += LiGetMillis() - du.enqueueTimeMs;
m_ActiveWndVideoStats.totalDecodeTimeUs += (LiGetMicroseconds() - du.enqueueTimeUs);
// Store the presentation time
frame->pts = du.presentationTimeMs;
// Store the presentation time (90 kHz timebase)
frame->pts = (int64_t)du.rtpTimestamp;
}
m_ActiveWndVideoStats.decodedFrames++;
@@ -1829,7 +1836,7 @@ int FFmpegVideoDecoder::submitDecodeUnit(PDECODE_UNIT du)
}
if (!m_LastFrameNumber) {
m_ActiveWndVideoStats.measurementStartTimestamp = SDL_GetTicks();
m_ActiveWndVideoStats.measurementStartUs = LiGetMicroseconds();
m_LastFrameNumber = du->frameNumber;
}
else {
@@ -1839,8 +1846,10 @@ int FFmpegVideoDecoder::submitDecodeUnit(PDECODE_UNIT du)
m_LastFrameNumber = du->frameNumber;
}
m_BwTracker.AddBytes(du->fullLength);
// Flip stats windows roughly every second
if (SDL_TICKS_PASSED(SDL_GetTicks(), m_ActiveWndVideoStats.measurementStartTimestamp + 1000)) {
if (LiGetMicroseconds() > m_ActiveWndVideoStats.measurementStartUs + 1000000) {
// Update overlay stats if it's enabled
if (Session::get()->getOverlayManager().isOverlayEnabled(Overlay::OverlayDebug)) {
VIDEO_STATS lastTwoWndStats = {};
@@ -1859,7 +1868,7 @@ int FFmpegVideoDecoder::submitDecodeUnit(PDECODE_UNIT du)
// Move this window into the last window slot and clear it for next window
SDL_memcpy(&m_LastWndVideoStats, &m_ActiveWndVideoStats, sizeof(m_ActiveWndVideoStats));
SDL_zero(m_ActiveWndVideoStats);
m_ActiveWndVideoStats.measurementStartTimestamp = SDL_GetTicks();
m_ActiveWndVideoStats.measurementStartUs = LiGetMicroseconds();
}
if (du->frameHostProcessingLatency != 0) {
@@ -1902,7 +1911,7 @@ int FFmpegVideoDecoder::submitDecodeUnit(PDECODE_UNIT du)
m_Pkt->flags = 0;
}
m_ActiveWndVideoStats.totalReassemblyTime += du->enqueueTimeMs - du->receiveTimeMs;
m_ActiveWndVideoStats.totalReassemblyTimeUs += (du->enqueueTimeUs - du->receiveTimeUs);
err = avcodec_send_packet(m_VideoDecoderCtx, m_Pkt);
if (err < 0) {