Use a separate callback to indicate EOS instead of renderFrame()

Author: Cameron Gutman
Date:   2022-04-07 21:30:20 -05:00
Parent: 5de0dc2713
Commit: 474591c6a5
12 changed files with 18 additions and 50 deletions
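
In short: every backend's renderFrame() previously had to special-case a null AVFrame that meant "end of stream", and the EGL backend used that sentinel to unbind its GL context. This commit removes the sentinel and adds an explicit cleanupRenderContext() callback that Pacer invokes on the same thread that calls renderFrame(). A condensed before/after of the renderer-side pattern (bodies abbreviated; SomeRenderer is a placeholder class, not one from this tree):

    // Before: the EOS sentinel every renderer had to check for
    void SomeRenderer::renderFrame(AVFrame* frame)
    {
        if (frame == nullptr) {
            // End of stream - nothing to do for us
            return;
        }
        // ... render the frame ...
    }

    // After: renderFrame() always receives a real frame; any per-thread
    // teardown work moves into the new hook, which runs on the same thread.
    void SomeRenderer::cleanupRenderContext()
    {
        // e.g. detach thread-bound state such as a GL context
    }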


@@ -584,11 +584,6 @@ void D3D11VARenderer::setHdrMode(bool enabled)
 void D3D11VARenderer::renderFrame(AVFrame* frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
     // Acquire the context lock for rendering to prevent concurrent
     // access from inside FFmpeg's decoding code
     lockContext(this);


@@ -475,11 +475,6 @@ void DrmRenderer::renderFrame(AVFrame* frame)
     AVDRMFrameDescriptor mappedFrame;
     AVDRMFrameDescriptor* drmFrame;
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
     // If we are acting as the frontend renderer, we'll need to have the backend
     // map this frame into a DRM PRIME descriptor that we can render.
     if (m_BackendRenderer != nullptr) {


@@ -978,11 +978,6 @@ int DXVA2Renderer::getDecoderColorspace()
 void DXVA2Renderer::renderFrame(AVFrame *frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
     IDirect3DSurface9* surface = reinterpret_cast<IDirect3DSurface9*>(frame->data[3]);
     HRESULT hr;


@@ -814,16 +814,16 @@ bool EGLRenderer::specialize() {
     return err == GL_NO_ERROR;
 }
+void EGLRenderer::cleanupRenderContext()
+{
+    // Detach the context from the render thread so the destructor can attach it
+    SDL_GL_MakeCurrent(m_Window, nullptr);
+}
 void EGLRenderer::renderFrame(AVFrame* frame)
 {
     EGLImage imgs[EGL_MAX_PLANES];
-    if (frame == nullptr) {
-        // End of stream - unbind the GL context
-        SDL_GL_MakeCurrent(m_Window, nullptr);
-        return;
-    }
     // Attach our GL context to the render thread
     // NB: It should already be current, unless the SDL render event watcher
     // performs a rendering operation (like a viewport update on resize) on
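
For context on the new EGLRenderer::cleanupRenderContext() above: a GL context can be current on only one thread at a time, so the render thread detaches it here to let the destructor re-attach it and free GL resources. An illustrative sketch of the destructor side of that handoff (not part of this diff; m_Context and the resource cleanup are assumed):

    EGLRenderer::~EGLRenderer()
    {
        if (m_Context != nullptr) {
            // Safe only because cleanupRenderContext() already detached the
            // context from the render thread
            SDL_GL_MakeCurrent(m_Window, m_Context);
            // ... delete GL textures/shaders here ...
            SDL_GL_MakeCurrent(m_Window, nullptr);
            SDL_GL_DeleteContext(m_Context);
        }
    }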


@@ -11,6 +11,7 @@ public:
     virtual ~EGLRenderer() override;
     virtual bool initialize(PDECODER_PARAMETERS params) override;
     virtual bool prepareDecoderContext(AVCodecContext* context, AVDictionary** options) override;
+    virtual void cleanupRenderContext() override;
     virtual void renderFrame(AVFrame* frame) override;
     virtual bool testRenderFrame(AVFrame* frame) override;
     virtual void notifyOverlayUpdated(Overlay::OverlayType) override;


@@ -345,11 +345,6 @@ bool MmalRenderer::needsTestFrame()
 void MmalRenderer::renderFrame(AVFrame* frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
     MMAL_BUFFER_HEADER_T* buffer = (MMAL_BUFFER_HEADER_T*)frame->data[3];
     MMAL_STATUS_T status;


@@ -46,8 +46,9 @@ Pacer::~Pacer()
         SDL_WaitThread(m_RenderThread, nullptr);
     }
     else {
-        // Send a null AVFrame to indicate end of stream on the main thread
-        m_VsyncRenderer->renderFrame(nullptr);
+        // Notify the renderer that it is being destroyed soon
+        // NB: This must happen on the same thread that calls renderFrame().
+        m_VsyncRenderer->cleanupRenderContext();
     }
     // Delete any remaining unconsumed frames
@@ -110,8 +111,9 @@ int Pacer::renderThread(void* context)
         me->renderLastFrameAndUnlock();
     }
-    // Send a null AVFrame to indicate end of stream on the render thread
-    me->m_VsyncRenderer->renderFrame(nullptr);
+    // Notify the renderer that it is being destroyed soon
+    // NB: This must happen on the same thread that calls renderFrame().
+    me->m_VsyncRenderer->cleanupRenderContext();
     return 0;
 }
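
The two Pacer hunks above establish the threading contract for the new hook: it always runs on whichever thread has been calling renderFrame(). A condensed sketch of that decision (member names from the diff; stop signaling and frame-queue draining omitted):

    Pacer::~Pacer()
    {
        if (m_RenderThread != nullptr) {
            // A dedicated render thread owns renderFrame(), so it calls
            // cleanupRenderContext() itself at the end of renderThread()
            // and the destructor only waits for it to exit.
            SDL_WaitThread(m_RenderThread, nullptr);
        }
        else {
            // No render thread: renderFrame() ran on this thread,
            // so the hook is invoked directly here.
            m_VsyncRenderer->cleanupRenderContext();
        }
    }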


@@ -102,6 +102,11 @@ public:
     virtual bool prepareDecoderContext(AVCodecContext* context, AVDictionary** options) = 0;
     virtual void renderFrame(AVFrame* frame) = 0;
+    // Called on the same thread as renderFrame() during destruction of the renderer
+    virtual void cleanupRenderContext() {
+        // Nothing
+    }
     virtual bool testRenderFrame(AVFrame*) {
         // If the renderer doesn't provide an explicit test routine,
         // we will always assume that any returned AVFrame can be
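
The base class above deliberately provides a no-op default, so only renderers with thread-affine state need to override the hook (in this commit, only EGLRenderer does). A self-contained sketch of that pattern, using a simplified stand-in interface rather than the real renderer class:

    #include <cstdio>

    struct Frame {};  // stand-in for AVFrame

    class Renderer {
    public:
        virtual ~Renderer() = default;
        virtual void renderFrame(Frame* frame) = 0;

        // Called on the same thread as renderFrame() during destruction
        virtual void cleanupRenderContext() {
            // Nothing by default
        }
    };

    // A renderer with per-thread state (e.g. a GL context) overrides the hook
    // so that state can be released on the thread that owns it.
    class ThreadAffineRenderer : public Renderer {
    public:
        void renderFrame(Frame*) override {
            std::puts("bind per-thread context, draw frame");
        }
        void cleanupRenderContext() override {
            std::puts("unbind per-thread context so the destructor can reclaim it");
        }
    };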


@@ -344,11 +344,6 @@ void SdlRenderer::renderFrame(AVFrame* frame)
     int err;
     AVFrame* swFrame = nullptr;
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
     if (frame->hw_frames_ctx != nullptr && frame->format != AV_PIX_FMT_CUDA) {
 #ifdef HAVE_CUDA
 ReadbackRetry:


@@ -351,11 +351,6 @@ int VAAPIRenderer::getDecoderColorspace()
 void
 VAAPIRenderer::renderFrame(AVFrame* frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
     VASurfaceID surface = (VASurfaceID)(uintptr_t)frame->data[3];
     AVHWDeviceContext* deviceContext = (AVHWDeviceContext*)m_HwContext->data;
     AVVAAPIDeviceContext* vaDeviceContext = (AVVAAPIDeviceContext*)deviceContext->hwctx;


@@ -476,11 +476,6 @@ void VDPAURenderer::renderOverlay(VdpOutputSurface destination, Overlay::OverlayType type)
 void VDPAURenderer::renderFrame(AVFrame* frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
     VdpStatus status;
     VdpVideoSurface videoSurface = (VdpVideoSurface)(uintptr_t)frame->data[3];


@@ -161,11 +161,6 @@ public:
     // Caller frees frame after we return
     virtual void renderFrame(AVFrame* frame) override
     {
-        if (frame == nullptr) {
-            // End of stream - nothing to do for us
-            return;
-        }
         OSStatus status;
         CVPixelBufferRef pixBuf = reinterpret_cast<CVPixelBufferRef>(frame->data[3]);