Mirror of https://github.com/moonlight-stream/moonlight-common-c.git (synced 2026-02-16 02:21:07 +00:00)
Improve support for high-resolution stats
* This patch adds a new microsecond-resolution function call, LiGetMicroseconds(), to complement the existing LiGetMillis(). Many variables used by stats have been updated to work at this higher resolution and now provide better results when displaying e.g. sub-millisecond frametime stats.

  To avoid confusion, variables that now contain microseconds have been renamed with a suffix of 'Us', and those ending in 'Ms' contain milliseconds. I originally experimented with nanoseconds, but that felt like overkill for our needs.

  Public API in Limelight.h:

  uint64_t LiGetMicroseconds(void);
  uint64_t LiGetMillis(void);
  const RTP_AUDIO_STATS* LiGetRTPAudioStats(void);  // provides access to RTP data for the overlay stats
  const RTP_VIDEO_STATS* LiGetRTPVideoStats(void);

  Note: Users of this library may need to make changes. If you are using LiGetMillis() to track the duration of something that is shown to the user, consider switching to LiGetMicroseconds(). Remember to divide by 1000 at time of display to show the value in milliseconds.
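As a hedged illustration of the migration note above, here is a minimal C sketch of timing a user-visible duration with the new call. The render-loop framing and the name exampleFrameTiming are illustrative and not part of the library; only LiGetMicroseconds() and the divide-by-1000-at-display rule come from this commit.

    #include <stdint.h>
    #include <stdio.h>
    #include "Limelight.h"

    // Hypothetical frametime measurement: sample timestamps in microseconds and
    // convert to milliseconds only at display time to keep sub-millisecond detail.
    void exampleFrameTiming(void) {
        uint64_t frameStartUs = LiGetMicroseconds();

        /* ... render or process one frame here ... */

        uint64_t frameTimeUs = LiGetMicroseconds() - frameStartUs;
        printf("Frametime: %.3f ms\n", frameTimeUs / 1000.0);
    }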
Committed by Cameron Gutman. Parent: 5f2280183c. Commit: 82ee2d6590.
.gitignore (vendored): 1 change
@@ -1,4 +1,5 @@
 .idea/
+.vscode/
 limelight-common/ARM/
 limelight-common/Debug/
 Build/
@@ -2,6 +2,7 @@
 cmake_minimum_required(VERSION 3.1...4.0)
 project(moonlight-common-c LANGUAGES C)

+string(TOUPPER "x${CMAKE_BUILD_TYPE}" BUILD_TYPE)
 set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake)

 option(USE_MBEDTLS "Use MbedTLS instead of OpenSSL" OFF)
@@ -61,7 +62,6 @@ else()
     target_include_directories(moonlight-common-c SYSTEM PRIVATE ${OPENSSL_INCLUDE_DIR})
 endif()

-string(TOUPPER "x${CMAKE_BUILD_TYPE}" BUILD_TYPE)
 if("${BUILD_TYPE}" STREQUAL "XDEBUG")
     target_compile_definitions(moonlight-common-c PRIVATE LC_DEBUG)
 else()
@@ -74,6 +74,36 @@ else()
     endif()
 endif()

+if (NOT(MSVC OR APPLE))
+    include(CheckLibraryExists)
+    CHECK_LIBRARY_EXISTS(rt clock_gettime "" HAVE_CLOCK_GETTIME)
+
+    if (NOT HAVE_CLOCK_GETTIME)
+        set(CMAKE_EXTRA_INCLUDE_FILES time.h)
+        CHECK_FUNCTION_EXISTS(clock_gettime HAVE_CLOCK_GETTIME)
+        SET(CMAKE_EXTRA_INCLUDE_FILES)
+    endif()
+
+    foreach(clock CLOCK_MONOTONIC CLOCK_MONOTONIC_RAW)
+        message(STATUS "Testing whether ${clock} can be used")
+        CHECK_CXX_SOURCE_COMPILES(
+            "#define _POSIX_C_SOURCE 200112L
+            #include <time.h>
+            int main ()
+            {
+                struct timespec ts[1];
+                clock_gettime (${clock}, ts);
+                return 0;
+            }" HAVE_${clock})
+        if(HAVE_${clock})
+            message(STATUS "Testing whether ${clock} can be used -- Success")
+        else()
+            message(STATUS "Testing whether ${clock} can be used -- Failed")
+        endif()
+    endforeach()
+
+endif()
+
 target_include_directories(moonlight-common-c SYSTEM PUBLIC src)

 target_include_directories(moonlight-common-c PRIVATE
@@ -299,6 +299,8 @@ static void AudioReceiveThreadProc(void* context) {
         Limelog("Received first audio packet after %d ms\n", waitingForAudioMs);

         if (firstReceiveTime != 0) {
+            // XXX firstReceiveTime is never set here...
+            // We're already dropping 500ms of audio so this probably doesn't matter
             packetsToDrop += (uint32_t)(PltGetMillis() - firstReceiveTime) / AudioPacketDuration;
         }

@@ -474,3 +476,7 @@ int LiGetPendingAudioFrames(void) {
 int LiGetPendingAudioDuration(void) {
     return LiGetPendingAudioFrames() * AudioPacketDuration;
 }
+
+const RTP_AUDIO_STATS* LiGetRTPAudioStats(void) {
+    return &rtpAudioQueue.stats;
+}
@@ -154,16 +154,15 @@ typedef struct _DECODE_UNIT {
     // (happens when the frame is repeated).
     uint16_t frameHostProcessingLatency;

-    // Receive time of first buffer. This value uses an implementation-defined epoch,
-    // but the same epoch as enqueueTimeMs and LiGetMillis().
-    uint64_t receiveTimeMs;
+    // Receive time of first buffer in microseconds.
+    uint64_t receiveTimeUs;

     // Time the frame was fully assembled and queued for the video decoder to process.
     // This is also approximately the same time as the final packet was received, so
-    // enqueueTimeMs - receiveTimeMs is the time taken to receive the frame. At the
+    // enqueueTimeUs - receiveTimeUs is the time taken to receive the frame. At the
     // time the decode unit is passed to submitDecodeUnit(), the total queue delay
-    // can be calculated by LiGetMillis() - enqueueTimeMs.
-    uint64_t enqueueTimeMs;
+    // can be calculated. This value is in microseconds.
+    uint64_t enqueueTimeUs;

     // Presentation time in milliseconds with the epoch at the first captured frame.
     // This can be used to aid frame pacing or to drop old frames that were queued too
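A hedged sketch of how a client might consume the receiveTimeUs and enqueueTimeUs fields described in the comments above, inside its decode-unit callback. The function name sampleSubmitDecodeUnit is illustrative and the PDECODE_UNIT pointer typedef is assumed from this header; the two fields and LiGetMicroseconds() are what the patch provides.

    #include <stdint.h>
    #include <stdio.h>
    #include "Limelight.h"

    // Derives per-frame receive duration and queue delay, as the comments describe:
    // enqueueTimeUs - receiveTimeUs is the time taken to receive the frame, and the
    // queue delay at submission time is "now" minus enqueueTimeUs.
    static void sampleSubmitDecodeUnit(PDECODE_UNIT du) {
        uint64_t receiveDurationUs = du->enqueueTimeUs - du->receiveTimeUs;
        uint64_t queueDelayUs = LiGetMicroseconds() - du->enqueueTimeUs;

        // Convert to milliseconds only when displaying.
        printf("recv %.3f ms, queued %.3f ms\n",
               receiveDurationUs / 1000.0, queueDelayUs / 1000.0);
    }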
@@ -833,7 +832,12 @@ int LiSendHighResScrollEvent(short scrollAmount);
 int LiSendHScrollEvent(signed char scrollClicks);
 int LiSendHighResHScrollEvent(short scrollAmount);

+// This function returns a time in microseconds with an implementation-defined epoch.
+// It should only ever be compared with the return value from a previous call to itself.
+uint64_t LiGetMicroseconds(void);
+
 // This function returns a time in milliseconds with an implementation-defined epoch.
+// It should only ever be compared with the return value from a previous call to itself.
 uint64_t LiGetMillis(void);

 // This is a simplistic STUN function that can assist clients in getting the WAN address
@@ -856,6 +860,36 @@ int LiGetPendingAudioFrames(void);
 // negotiated audio frame duration.
 int LiGetPendingAudioDuration(void);

+// Returns a pointer to a struct containing various statistics about the RTP audio stream.
+// The data should be considered read-only and must not be modified.
+typedef struct _RTP_AUDIO_STATS {
+    uint32_t packetCountAudio; // total audio packets
+    uint32_t packetCountFec; // total packets of type FEC
+    uint32_t packetCountFecRecovered; // a packet was saved
+    uint32_t packetCountFecFailed; // tried to recover but too much was lost
+    uint32_t packetCountOOS; // out-of-sequence packets
+    uint32_t packetCountInvalid; // corrupted packets, etc
+    uint32_t packetCountFecInvalid; // invalid FEC packet
+} RTP_AUDIO_STATS, *PRTP_AUDIO_STATS;
+
+const RTP_AUDIO_STATS* LiGetRTPAudioStats(void);
+
+// Returns a pointer to a struct containing various statistics about the RTP video stream.
+// The data should be considered read-only and must not be modified.
+// Right now this is mainly used to track total video and FEC packets, as there are
+// many video stats already implemented at a higher level in moonlight-qt.
+typedef struct _RTP_VIDEO_STATS {
+    uint32_t packetCountVideo; // total video packets
+    uint32_t packetCountFec; // total packets of type FEC
+    uint32_t packetCountFecRecovered; // a packet was saved
+    uint32_t packetCountFecFailed; // tried to recover but too much was lost
+    uint32_t packetCountOOS; // out-of-sequence packets
+    uint32_t packetCountInvalid; // corrupted packets, etc
+    uint32_t packetCountFecInvalid; // invalid FEC packet
+} RTP_VIDEO_STATS, *PRTP_VIDEO_STATS;
+
+const RTP_VIDEO_STATS* LiGetRTPVideoStats(void);
+
 // Port index flags for use with LiGetPortFromPortFlagIndex() and LiGetProtocolFromPortFlagIndex()
 #define ML_PORT_INDEX_TCP_47984 0
 #define ML_PORT_INDEX_TCP_47989 1
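A small consumer sketch for the overlay use case mentioned in the commit message, assuming only the accessors and fields declared above; the function name and output format are illustrative.

    #include <stdio.h>
    #include "Limelight.h"

    // Prints a one-line summary of the audio and video RTP stats.
    // The returned structs are read-only per the comments above.
    void printRtpStatsLine(void) {
        const RTP_AUDIO_STATS* audio = LiGetRTPAudioStats();
        const RTP_VIDEO_STATS* video = LiGetRTPVideoStats();

        printf("Audio: %u pkts, %u FEC, %u recovered, %u FEC failures, %u OOS\n",
               audio->packetCountAudio, audio->packetCountFec,
               audio->packetCountFecRecovered, audio->packetCountFecFailed,
               audio->packetCountOOS);
        printf("Video: %u pkts, %u FEC, %u recovered, %u OOS\n",
               video->packetCountVideo, video->packetCountFec,
               video->packetCountFecRecovered, video->packetCountOOS);
    }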
@@ -148,6 +148,10 @@ uint64_t LiGetMillis(void) {
     return PltGetMillis();
 }

+uint64_t LiGetMicroseconds(void) {
+    return PltGetMicroseconds();
+}
+
 uint32_t LiGetHostFeatureFlags(void) {
     return SunshineFeatureFlags;
 }
src/Platform.c: 161 changes
@@ -419,24 +419,157 @@ void PltWaitForConditionVariable(PLT_COND* cond, PLT_MUTEX* mutex) {
 #endif
 }

-uint64_t PltGetMillis(void) {
-#if defined(LC_WINDOWS)
-    return GetTickCount64();
-#elif defined(CLOCK_MONOTONIC) && !defined(NO_CLOCK_GETTIME)
-    struct timespec tv;
-
-    clock_gettime(CLOCK_MONOTONIC, &tv);
-
-    return ((uint64_t)tv.tv_sec * 1000) + (tv.tv_nsec / 1000000);
-#else
-    struct timeval tv;
-
-    gettimeofday(&tv, NULL);
-
-    return ((uint64_t)tv.tv_sec * 1000) + (tv.tv_usec / 1000);
-#endif
-}
+//// Begin timing functions
+
+// These functions return a number of microseconds or milliseconds since an opaque start time.
+
+static bool has_monotonic_time = false;
+static bool ticks_started = false;
+
+#if defined(LC_WINDOWS)
+
+static LARGE_INTEGER start_ticks;
+static LARGE_INTEGER ticks_per_second;
+
+void PltTicksInit(void) {
+    if (ticks_started) {
+        return;
+    }
+    ticks_started = true;
+    QueryPerformanceFrequency(&ticks_per_second);
+    QueryPerformanceCounter(&start_ticks);
+}
+
+uint64_t PltGetMicroseconds(void) {
+    if (!ticks_started) {
+        PltTicksInit();
+    }
+    LARGE_INTEGER now;
+    QueryPerformanceCounter(&now);
+    return (uint64_t)(((now.QuadPart - start_ticks.QuadPart) * 1000000) / ticks_per_second.QuadPart);
+}
+
+#elif defined(LC_DARWIN)
+
+static mach_timebase_info_data_t mach_base_info;
+static uint64_t start;
+
+void PltTicksInit(void) {
+    if (ticks_started) {
+        return;
+    }
+    ticks_started = true;
+    mach_timebase_info(&mach_base_info);
+    has_monotonic_time = true;
+    start = mach_absolute_time();
+}
+
+uint64_t PltGetMicroseconds(void) {
+    if (!ticks_started) {
+        PltTicksInit();
+    }
+    const uint64_t now = mach_absolute_time();
+    return (((now - start) * mach_base_info.numer) / mach_base_info.denom) / 1000;
+}
+
+#elif defined(__vita__)
+
+static uint64_t start;
+
+void PltTicksInit(void) {
+    if (ticks_started) {
+        return;
+    }
+    ticks_started = true;
+    start = sceKernelGetProcessTimeWide();
+}
+
+uint64_t PltGetMicroseconds(void) {
+    if (!ticks_started) {
+        PltTicksInit();
+    }
+    uint64_t now = sceKernelGetProcessTimeWide();
+    return (uint64_t)(now - start);
+}
+
+#elif defined(__3DS__)
+
+static uint64_t start;
+
+void PltTicksInit(void) {
+    if (ticks_started) {
+        return;
+    }
+    ticks_started = true;
+    start = svcGetSystemTick();
+}
+
+uint64_t PltGetMicroseconds(void) {
+    if (!ticks_started) {
+        PltTicksInit();
+    }
+    uint64_t elapsed = svcGetSystemTick() - start;
+    return elapsed * 1000 / CPU_TICKS_PER_MSEC;
+}
+
+#else
+
+/* Use CLOCK_MONOTONIC_RAW, if available, which is not subject to adjustment by NTP */
+#ifdef HAVE_CLOCK_GETTIME
+static struct timespec start_ts;
+# ifdef CLOCK_MONOTONIC_RAW
+# define PLT_MONOTONIC_CLOCK CLOCK_MONOTONIC_RAW
+# else
+# define PLT_MONOTONIC_CLOCK CLOCK_MONOTONIC
+# endif
+#endif
+
+static struct timeval start_tv;
+
+void PltTicksInit(void) {
+    if (ticks_started) {
+        return;
+    }
+    ticks_started = true;
+#ifdef HAVE_CLOCK_GETTIME
+    if (clock_gettime(PLT_MONOTONIC_CLOCK, &start_ts) == 0) {
+        has_monotonic_time = true;
+    } else
+#endif
+    {
+        gettimeofday(&start_tv, NULL);
+    }
+}
+
+uint64_t PltGetMicroseconds(void) {
+    if (!ticks_started) {
+        PltTicksInit();
+    }
+
+    if (has_monotonic_time) {
+#ifdef HAVE_CLOCK_GETTIME
+        struct timespec now;
+        clock_gettime(PLT_MONOTONIC_CLOCK, &now);
+        return (uint64_t)(((int64_t)(now.tv_sec - start_ts.tv_sec) * 1000000) + ((now.tv_nsec - start_ts.tv_nsec) / 1000));
+#else
+        LC_ASSERT(false);
+        return 0;
+#endif
+    } else {
+        struct timeval now;
+        gettimeofday(&now, NULL);
+        return (uint64_t)(((int64_t)(now.tv_sec - start_tv.tv_sec) * 1000000) + (now.tv_usec - start_tv.tv_usec));
+    }
+}
+
+#endif
+
+uint64_t PltGetMillis(void) {
+    return PltGetMicroseconds() / 1000;
+}
+
+//// End timing functions

 bool PltSafeStrcpy(char* dest, size_t dest_size, const char* src) {
     LC_ASSERT(dest_size > 0);

@@ -474,6 +607,8 @@ bool PltSafeStrcpy(char* dest, size_t dest_size, const char* src) {
 int initializePlatform(void) {
     int err;

+    PltTicksInit();
+
     err = initializePlatformSockets();
     if (err != 0) {
         return err;
@@ -18,6 +18,15 @@
 #include <Windows.h>
 #include <Winsock2.h>
 #include <ws2tcpip.h>
+#elif defined(__APPLE__)
+#include <mach/mach_time.h>
+#include <unistd.h>
+#include <pthread.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+#include <arpa/inet.h>
+#include <netinet/in.h>
+#include <fcntl.h>
 #elif defined(__vita__)
 #include <unistd.h>
 #include <sys/time.h>
@@ -146,6 +155,11 @@

 int initializePlatform(void);
 void cleanupPlatform(void);
+bool PltSafeStrcpy(char* dest, size_t dest_size, const char* src);
+
+void PltTicksInit(void);
+
+uint64_t PltGetMicroseconds(void);

 uint64_t PltGetMillis(void);
-bool PltSafeStrcpy(char* dest, size_t dest_size, const char* src);
@@ -204,15 +204,19 @@ static PRTPA_FEC_BLOCK getFecBlockForRtpPacket(PRTP_AUDIO_QUEUE queue, PRTP_PACK

     if (packet->packetType == RTP_PAYLOAD_TYPE_AUDIO) {
         if (length < sizeof(RTP_PACKET)) {
+            queue->stats.packetCountInvalid++;
             Limelog("RTP audio data packet too small: %u\n", length);
             LC_ASSERT_VT(false);
             return NULL;
         }

+        queue->stats.packetCountAudio++;
+
         // Remember if we've received out-of-sequence packets lately. We can use
         // this knowledge to more quickly give up on FEC blocks.
         if (!queue->synchronizing && isBefore16(packet->sequenceNumber, queue->oldestRtpBaseSequenceNumber)) {
             queue->lastOosSequenceNumber = packet->sequenceNumber;
+            queue->stats.packetCountOOS++;
             if (!queue->receivedOosData) {
                 Limelog("Leaving fast audio recovery mode after OOS audio data (%u < %u)\n",
                         packet->sequenceNumber, queue->oldestRtpBaseSequenceNumber);
@@ -238,11 +242,14 @@ static PRTPA_FEC_BLOCK getFecBlockForRtpPacket(PRTP_AUDIO_QUEUE queue, PRTP_PACK
         PAUDIO_FEC_HEADER fecHeader = (PAUDIO_FEC_HEADER)(packet + 1);

         if (length < sizeof(RTP_PACKET) + sizeof(AUDIO_FEC_HEADER)) {
+            queue->stats.packetCountFecInvalid++;
             Limelog("RTP audio FEC packet too small: %u\n", length);
             LC_ASSERT_VT(false);
             return NULL;
         }

+        queue->stats.packetCountFec++;
+
         // This is an FEC packet, so we can just copy (and byteswap) the FEC header
         fecBlockPayloadType = fecHeader->payloadType;
         fecBlockBaseSeqNum = BE16(fecHeader->baseSequenceNumber);
@@ -252,6 +259,7 @@ static PRTPA_FEC_BLOCK getFecBlockForRtpPacket(PRTP_AUDIO_QUEUE queue, PRTP_PACK
         // Ensure the FEC shard index is valid to prevent OOB access
         // later during recovery.
         if (fecHeader->fecShardIndex >= RTPA_FEC_SHARDS) {
+            queue->stats.packetCountFecInvalid++;
             Limelog("Too many audio FEC shards: %u\n", fecHeader->fecShardIndex);
             LC_ASSERT_VT(false);
             return NULL;
@@ -261,6 +269,7 @@ static PRTPA_FEC_BLOCK getFecBlockForRtpPacket(PRTP_AUDIO_QUEUE queue, PRTP_PACK
             // The FEC blocks must start on a RTPA_DATA_SHARDS boundary for our queuing logic to work. This isn't
             // the case for older versions of GeForce Experience (at least 3.13). Disable the FEC logic if this
             // invariant is validated.
+            queue->stats.packetCountFecInvalid++;
             Limelog("Invalid FEC block base sequence number (got %u, expected %u)\n",
                     fecBlockBaseSeqNum, (fecBlockBaseSeqNum / RTPA_DATA_SHARDS) * RTPA_DATA_SHARDS);
             Limelog("Audio FEC has been disabled due to an incompatibility with your host's old software!\n");
@@ -304,6 +313,7 @@ static PRTPA_FEC_BLOCK getFecBlockForRtpPacket(PRTP_AUDIO_QUEUE queue, PRTP_PACK
         if (existingBlock->blockSize != blockSize) {
             // This can happen with older versions of GeForce Experience (3.13) and Sunshine that don't use a
             // constant size for audio packets.
+            queue->stats.packetCountFecInvalid++;
             Limelog("Audio block size mismatch (got %u, expected %u)\n", blockSize, existingBlock->blockSize);
             Limelog("Audio FEC has been disabled due to an incompatibility with your host's old software!\n");
             LC_ASSERT_VT(existingBlock->blockSize == blockSize);
@@ -331,7 +341,7 @@ static PRTPA_FEC_BLOCK getFecBlockForRtpPacket(PRTP_AUDIO_QUEUE queue, PRTP_PACK

         memset(block, 0, sizeof(*block));

-        block->queueTimeMs = PltGetMillis();
+        block->queueTimeUs = PltGetMicroseconds();
         block->blockSize = blockSize;
         memset(block->marks, 1, sizeof(block->marks));

@@ -454,13 +464,15 @@ static bool completeFecBlock(PRTP_AUDIO_QUEUE queue, PRTPA_FEC_BLOCK block) {
         }
     }

-#ifdef FEC_VERBOSE
     if (block->dataShardsReceived != RTPA_DATA_SHARDS) {
+        queue->stats.packetCountFecRecovered += RTPA_DATA_SHARDS - block->dataShardsReceived;
+#ifdef FEC_VERBOSE
         Limelog("Recovered %d audio data shards from block %d\n",
                 RTPA_DATA_SHARDS - block->dataShardsReceived,
                 block->fecHeader.baseSequenceNumber);
-    }
 #endif
+    }

 #ifdef FEC_VALIDATION_MODE
     // Check the RTP header values
@@ -531,9 +543,10 @@ static void handleMissingPackets(PRTP_AUDIO_QUEUE queue) {
     // At this point, we know we've got a second FEC block queued up waiting on the first one to complete.
     // If we've never seen OOS data from this host, we'll assume the first one is lost and skip forward.
     // If we have seen OOS data, we'll wait for a little while longer to see if OOS packets arrive before giving up.
-    if (!queue->receivedOosData || PltGetMillis() - queue->blockHead->queueTimeMs > (uint32_t)(AudioPacketDuration * RTPA_DATA_SHARDS) + RTPQ_OOS_WAIT_TIME_MS) {
+    if (!queue->receivedOosData || PltGetMicroseconds() - queue->blockHead->queueTimeUs > (uint64_t)(AudioPacketDuration * RTPA_DATA_SHARDS) + (RTPQ_OOS_WAIT_TIME_MS * 1000)) {
         LC_ASSERT(!isBefore16(queue->nextRtpSequenceNumber, queue->blockHead->fecHeader.baseSequenceNumber));

+        queue->stats.packetCountFecFailed++;
         Limelog("Unable to recover audio data block %u to %u (%u+%u=%u received < %u needed)\n",
                 queue->blockHead->fecHeader.baseSequenceNumber,
                 queue->blockHead->fecHeader.baseSequenceNumber + RTPA_DATA_SHARDS - 1,
@@ -33,7 +33,7 @@ typedef struct _RTPA_FEC_BLOCK {

     AUDIO_FEC_HEADER fecHeader;

-    uint64_t queueTimeMs;
+    uint64_t queueTimeUs;
     uint8_t dataShardsReceived;
     uint8_t fecShardsReceived;
     bool fullyReassembled;
@@ -63,6 +63,8 @@ typedef struct _RTP_AUDIO_QUEUE {
     bool receivedOosData;
     bool synchronizing;
     bool incompatibleServer;
+
+    RTP_AUDIO_STATS stats;
 } RTP_AUDIO_QUEUE, *PRTP_AUDIO_QUEUE;

 #define RTPQ_RET_PACKET_CONSUMED 0x1
@@ -497,8 +497,8 @@ static void stageCompleteFecBlock(PRTP_VIDEO_QUEUE queue) {
             // and use the first packet's receive time for all packets. This ends up
             // actually being better for the measurements that the depacketizer does,
             // since it properly handles out of order packets.
-            LC_ASSERT(queue->bufferFirstRecvTimeMs != 0);
-            entry->receiveTimeMs = queue->bufferFirstRecvTimeMs;
+            LC_ASSERT(queue->bufferFirstRecvTimeUs != 0);
+            entry->receiveTimeUs = queue->bufferFirstRecvTimeUs;

             // Move this packet to the completed FEC block list
             insertEntryIntoList(&queue->completedFecBlockList, entry);
@@ -690,7 +690,7 @@ int RtpvAddPacket(PRTP_VIDEO_QUEUE queue, PRTP_PACKET packet, int length, PRTPV_
             // being able to reconstruct a full frame from it.
             connectionSawFrame(queue->currentFrameNumber);

-            queue->bufferFirstRecvTimeMs = PltGetMillis();
+            queue->bufferFirstRecvTimeUs = PltGetMicroseconds();
             queue->bufferLowestSequenceNumber = U16(packet->sequenceNumber - fecIndex);
             queue->nextContiguousSequenceNumber = queue->bufferLowestSequenceNumber;
             queue->receivedDataPackets = 0;
@@ -706,6 +706,9 @@ int RtpvAddPacket(PRTP_VIDEO_QUEUE queue, PRTP_PACKET packet, int length, PRTPV_
             queue->bufferHighestSequenceNumber = U16(queue->bufferFirstParitySequenceNumber + queue->bufferParityPackets - 1);
             queue->multiFecCurrentBlockNumber = fecCurrentBlockNumber;
             queue->multiFecLastBlockNumber = (nvPacket->multiFecBlocks >> 6) & 0x3;
+
+            queue->stats.packetCountVideo += queue->bufferDataPackets;
+            queue->stats.packetCountFec += queue->bufferParityPackets;
         }

         // Reject packets above our FEC queue valid sequence number range
@@ -6,7 +6,7 @@ typedef struct _RTPV_QUEUE_ENTRY {
     struct _RTPV_QUEUE_ENTRY* next;
     struct _RTPV_QUEUE_ENTRY* prev;
     PRTP_PACKET packet;
-    uint64_t receiveTimeMs;
+    uint64_t receiveTimeUs;
     uint32_t presentationTimeMs;
     int length;
     bool isParity;
@@ -22,7 +22,7 @@ typedef struct _RTP_VIDEO_QUEUE {
     RTPV_QUEUE_LIST pendingFecBlockList;
     RTPV_QUEUE_LIST completedFecBlockList;

-    uint64_t bufferFirstRecvTimeMs;
+    uint64_t bufferFirstRecvTimeUs;
     uint32_t bufferLowestSequenceNumber;
     uint32_t bufferHighestSequenceNumber;
     uint32_t bufferFirstParitySequenceNumber;
@@ -45,6 +45,8 @@ typedef struct _RTP_VIDEO_QUEUE {

     uint32_t lastOosFramePresentationTimestamp;
     bool receivedOosData;
+
+    RTP_VIDEO_STATS stats; // the above values are short-lived, this tracks stats for the life of the queue
 } RTP_VIDEO_QUEUE, *PRTP_VIDEO_QUEUE;

 #define RTPF_RET_QUEUED 0
@@ -17,9 +17,9 @@ static bool decodingFrame;
 static int frameType;
 static uint16_t lastPacketPayloadLength;
 static bool strictIdrFrameWait;
-static uint64_t syntheticPtsBase;
+static uint64_t syntheticPtsBaseUs;
 static uint16_t frameHostProcessingLatency;
-static uint64_t firstPacketReceiveTime;
+static uint64_t firstPacketReceiveTimeUs;
 static unsigned int firstPacketPresentationTime;
 static bool dropStatePending;
 static bool idrFrameProcessed;
@@ -68,9 +68,9 @@ void initializeVideoDepacketizer(int pktSize) {
     waitingForRefInvalFrame = false;
     lastPacketInStream = UINT32_MAX;
    decodingFrame = false;
-    syntheticPtsBase = 0;
+    syntheticPtsBaseUs = 0;
     frameHostProcessingLatency = 0;
-    firstPacketReceiveTime = 0;
+    firstPacketReceiveTimeUs = 0;
     firstPacketPresentationTime = 0;
     lastPacketPayloadLength = 0;
     dropStatePending = false;
@@ -483,9 +483,9 @@ static void reassembleFrame(int frameNumber) {
         qdu->decodeUnit.frameType = frameType;
         qdu->decodeUnit.frameNumber = frameNumber;
         qdu->decodeUnit.frameHostProcessingLatency = frameHostProcessingLatency;
-        qdu->decodeUnit.receiveTimeMs = firstPacketReceiveTime;
+        qdu->decodeUnit.receiveTimeUs = firstPacketReceiveTimeUs;
         qdu->decodeUnit.presentationTimeMs = firstPacketPresentationTime;
-        qdu->decodeUnit.enqueueTimeMs = LiGetMillis();
+        qdu->decodeUnit.enqueueTimeUs = PltGetMicroseconds();

         // These might be wrong for a few frames during a transition between SDR and HDR,
         // but the effects shouldn't very noticable since that's an infrequent operation.
@@ -740,7 +740,7 @@ static bool isFirstPacket(uint8_t flags, uint8_t fecBlockNumber) {
 // Process an RTP Payload
 // The caller will free *existingEntry unless we NULL it
 static void processRtpPayload(PNV_VIDEO_PACKET videoPacket, int length,
-                              uint64_t receiveTimeMs, unsigned int presentationTimeMs,
+                              uint64_t receiveTimeUs, uint64_t presentationTimeMs,
                               PLENTRY_INTERNAL* existingEntry) {
     BUFFER_DESC currentPos;
     uint32_t frameIndex;
@@ -823,19 +823,19 @@ static void processRtpPayload(PNV_VIDEO_PACKET videoPacket, int length,
         // We're now decoding a frame
         decodingFrame = true;
         frameType = FRAME_TYPE_PFRAME;
-        firstPacketReceiveTime = receiveTimeMs;
+        firstPacketReceiveTimeUs = receiveTimeUs;

         // Some versions of Sunshine don't send a valid PTS, so we will
         // synthesize one using the receive time as the time base.
-        if (!syntheticPtsBase) {
-            syntheticPtsBase = receiveTimeMs;
+        if (!syntheticPtsBaseUs) {
+            syntheticPtsBaseUs = receiveTimeUs;
         }

         if (!presentationTimeMs && frameIndex > 0) {
-            firstPacketPresentationTime = (unsigned int)(receiveTimeMs - syntheticPtsBase);
+            firstPacketPresentationTime = (unsigned int)((receiveTimeUs - syntheticPtsBaseUs) / 1000);
         }
         else {
-            firstPacketPresentationTime = presentationTimeMs;
+            firstPacketPresentationTime = (unsigned int)presentationTimeMs;
         }
     }

@@ -1154,7 +1154,7 @@ void queueRtpPacket(PRTPV_QUEUE_ENTRY queueEntryPtr) {
     RTPV_QUEUE_ENTRY queueEntry = *queueEntryPtr;

     LC_ASSERT(!queueEntry.isParity);
-    LC_ASSERT(queueEntry.receiveTimeMs != 0);
+    LC_ASSERT(queueEntry.receiveTimeUs != 0);

     dataOffset = sizeof(*queueEntry.packet);
     if (queueEntry.packet->header & FLAG_EXTENSION) {
@@ -1173,7 +1173,7 @@ void queueRtpPacket(PRTPV_QUEUE_ENTRY queueEntryPtr) {

     processRtpPayload((PNV_VIDEO_PACKET)(((char*)queueEntry.packet) + dataOffset),
                       queueEntry.length - dataOffset,
-                      queueEntry.receiveTimeMs,
+                      queueEntry.receiveTimeUs,
                       queueEntry.presentationTimeMs,
                       &existingEntry);

@@ -168,9 +168,7 @@ static void VideoReceiveThreadProc(void* context) {

 #ifndef LC_FUZZING
             if (!receivedFullFrame) {
-                uint64_t now = PltGetMillis();
-
-                if (now - firstDataTimeMs >= FIRST_FRAME_TIMEOUT_SEC * 1000) {
+                if (PltGetMillis() - firstDataTimeMs >= FIRST_FRAME_TIMEOUT_SEC * 1000) {
                     Limelog("Terminating connection due to lack of a successful video frame\n");
                     ListenerCallbacks.connectionTerminated(ML_ERROR_NO_VIDEO_FRAME);
                     break;
@@ -415,3 +413,7 @@ int startVideoStream(void* rendererContext, int drFlags) {

     return 0;
 }
+
+const RTP_VIDEO_STATS* LiGetRTPVideoStats(void) {
+    return &rtpQueue.stats;
+}