Mirror of https://github.com/moonlight-stream/moonlight-common-c.git, synced 2025-08-17 17:05:50 +00:00
Add Video/Audio prefix to ThreadProc names
It's easier to find them in stack traces that way.
commit 75999a6e07
parent eff97414bf
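Background for the rename: the audio and video stream sources each declared file-local thread entry points with identical names (UdpPingThreadProc, ReceiveThreadProc, DecoderThreadProc), so a frame in a stack trace did not say which stream's thread it belonged to. The sketch below only illustrates that naming pattern; it is a standalone, hypothetical example using plain pthreads rather than the project's PltCreateThread wrapper, and none of it is moonlight-common-c code.

// Hypothetical sketch: distinct, prefixed entry-point names mean a backtrace frame
// such as "AudioPingThreadProc" immediately identifies the owning subsystem, whereas
// two file-local functions both named "UdpPingThreadProc" would be ambiguous.
#include <pthread.h>
#include <stdio.h>

static void* AudioPingThreadProc(void* context) {
    // Placeholder body; the real thread periodically pings the audio RTP socket.
    printf("audio ping thread running\n");
    return NULL;
}

static void* VideoPingThreadProc(void* context) {
    // Same idea for the video stream; the distinct symbol keeps stack traces unambiguous.
    printf("video ping thread running\n");
    return NULL;
}

int main(void) {
    pthread_t audioPing, videoPing;
    pthread_create(&audioPing, NULL, AudioPingThreadProc, NULL);
    pthread_create(&videoPing, NULL, VideoPingThreadProc, NULL);
    pthread_join(audioPing, NULL);
    pthread_join(videoPing, NULL);
    return 0;
}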
src/AudioStream.c

@@ -41,7 +41,7 @@ typedef struct _QUEUED_AUDIO_PACKET {
     char data[MAX_PACKET_SIZE];
 } QUEUED_AUDIO_PACKET, *PQUEUED_AUDIO_PACKET;
 
-static void UdpPingThreadProc(void* context) {
+static void AudioPingThreadProc(void* context) {
     // Ping in ASCII
     char pingData[] = { 0x50, 0x49, 0x4E, 0x47 };
     LC_SOCKADDR saddr;
@@ -93,7 +93,7 @@ int initializeAudioStream(void) {
 
     // We may receive audio before our threads are started, but that's okay. We'll
     // drop the first 1 second of audio packets to catch up with the backlog.
-    int err = PltCreateThread("AudioPing", UdpPingThreadProc, NULL, &udpPingThread);
+    int err = PltCreateThread("AudioPing", AudioPingThreadProc, NULL, &udpPingThread);
     if (err != 0) {
         closeSocket(rtpSocket);
         rtpSocket = INVALID_SOCKET;
@@ -225,7 +225,7 @@ static void decodeInputData(PQUEUED_AUDIO_PACKET packet) {
     }
 }
 
-static void ReceiveThreadProc(void* context) {
+static void AudioReceiveThreadProc(void* context) {
     PRTP_PACKET rtp;
     PQUEUED_AUDIO_PACKET packet;
     int queueStatus;
@@ -369,7 +369,7 @@ static void ReceiveThreadProc(void* context) {
     }
 }
 
-static void DecoderThreadProc(void* context) {
+static void AudioDecoderThreadProc(void* context) {
     int err;
     PQUEUED_AUDIO_PACKET packet;
 
@@ -438,7 +438,7 @@ int startAudioStream(void* audioContext, int arFlags) {
 
     AudioCallbacks.start();
 
-    err = PltCreateThread("AudioRecv", ReceiveThreadProc, NULL, &receiveThread);
+    err = PltCreateThread("AudioRecv", AudioReceiveThreadProc, NULL, &receiveThread);
     if (err != 0) {
         AudioCallbacks.stop();
         closeSocket(rtpSocket);
@@ -447,7 +447,7 @@ int startAudioStream(void* audioContext, int arFlags) {
     }
 
     if ((AudioCallbacks.capabilities & CAPABILITY_DIRECT_SUBMIT) == 0) {
-        err = PltCreateThread("AudioDec", DecoderThreadProc, NULL, &decoderThread);
+        err = PltCreateThread("AudioDec", AudioDecoderThreadProc, NULL, &decoderThread);
         if (err != 0) {
             AudioCallbacks.stop();
             PltInterruptThread(&receiveThread);
src/VideoStream.c

@@ -43,7 +43,7 @@ void destroyVideoStream(void) {
 }
 
 // UDP Ping proc
-static void UdpPingThreadProc(void* context) {
+static void VideoPingThreadProc(void* context) {
     char pingData[] = { 0x50, 0x49, 0x4E, 0x47 };
     LC_SOCKADDR saddr;
 
@@ -62,7 +62,7 @@ static void UdpPingThreadProc(void* context) {
 }
 
 // Receive thread proc
-static void ReceiveThreadProc(void* context) {
+static void VideoReceiveThreadProc(void* context) {
     int err;
     int bufferSize, receiveSize;
     char* buffer;
@@ -160,7 +160,7 @@ void notifyKeyFrameReceived(void) {
 }
 
 // Decoder thread proc
-static void DecoderThreadProc(void* context) {
+static void VideoDecoderThreadProc(void* context) {
     while (!PltIsThreadInterrupted(&decoderThread)) {
         VIDEO_FRAME_HANDLE frameHandle;
         PDECODE_UNIT decodeUnit;
@@ -252,7 +252,7 @@ int startVideoStream(void* rendererContext, int drFlags) {
 
     VideoCallbacks.start();
 
-    err = PltCreateThread("VideoRecv", ReceiveThreadProc, NULL, &receiveThread);
+    err = PltCreateThread("VideoRecv", VideoReceiveThreadProc, NULL, &receiveThread);
     if (err != 0) {
         VideoCallbacks.stop();
         closeSocket(rtpSocket);
@@ -261,7 +261,7 @@ int startVideoStream(void* rendererContext, int drFlags) {
     }
 
     if ((VideoCallbacks.capabilities & (CAPABILITY_DIRECT_SUBMIT | CAPABILITY_PULL_RENDERER)) == 0) {
-        err = PltCreateThread("VideoDec", DecoderThreadProc, NULL, &decoderThread);
+        err = PltCreateThread("VideoDec", VideoDecoderThreadProc, NULL, &decoderThread);
         if (err != 0) {
             VideoCallbacks.stop();
             PltInterruptThread(&receiveThread);
@@ -300,7 +300,7 @@ int startVideoStream(void* rendererContext, int drFlags) {
 
     // Start pinging before reading the first frame so GFE knows where
     // to send UDP data
-    err = PltCreateThread("VideoPing", UdpPingThreadProc, NULL, &udpPingThread);
+    err = PltCreateThread("VideoPing", VideoPingThreadProc, NULL, &udpPingThread);
    if (err != 0) {
        VideoCallbacks.stop();
        stopVideoDepacketizer();