Use a separate callback to indicate EOS instead of renderFrame()

Cameron Gutman 2022-04-07 21:30:20 -05:00
parent 5de0dc2713
commit 474591c6a5
12 changed files with 18 additions and 50 deletions

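The pattern removed below was identical in every renderer: renderFrame(nullptr) doubled as the end-of-stream notification. After this commit, renderFrame() can assume a valid frame, and teardown work moves into the new cleanupRenderContext() callback that Pacer invokes on the rendering thread. The following sketch is illustrative only; HypotheticalRenderer is not a class in this repository, it just shows how a typical renderer's responsibilities shift.

struct AVFrame;  // opaque forward declaration; the real type comes from FFmpeg's libavutil

// Illustrative only: how a typical renderer changes under this commit.
class HypotheticalRenderer {
public:
    // Before: a nullptr frame doubled as the end-of-stream signal.
    void renderFrameOld(AVFrame* frame) {
        if (frame == nullptr) {
            // End of stream - nothing to do for us
            return;
        }
        draw(frame);
    }

    // After: frame is always a real frame; no special case needed.
    void renderFrame(AVFrame* frame) {
        draw(frame);
    }

    // New teardown hook, called on the rendering thread instead of renderFrame(nullptr).
    void cleanupRenderContext() {
        // Release anything bound to the rendering thread here.
    }

private:
    void draw(AVFrame*) { /* hand the frame to the display API */ }
};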
View File

@@ -584,11 +584,6 @@ void D3D11VARenderer::setHdrMode(bool enabled)
 
 void D3D11VARenderer::renderFrame(AVFrame* frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
-
     // Acquire the context lock for rendering to prevent concurrent
     // access from inside FFmpeg's decoding code
     lockContext(this);

View File

@@ -475,11 +475,6 @@ void DrmRenderer::renderFrame(AVFrame* frame)
     AVDRMFrameDescriptor mappedFrame;
     AVDRMFrameDescriptor* drmFrame;
 
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
-
     // If we are acting as the frontend renderer, we'll need to have the backend
     // map this frame into a DRM PRIME descriptor that we can render.
     if (m_BackendRenderer != nullptr) {

View File

@@ -978,11 +978,6 @@ int DXVA2Renderer::getDecoderColorspace()
 
 void DXVA2Renderer::renderFrame(AVFrame *frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
-
     IDirect3DSurface9* surface = reinterpret_cast<IDirect3DSurface9*>(frame->data[3]);
     HRESULT hr;
 

View File

@@ -814,16 +814,16 @@ bool EGLRenderer::specialize() {
     return err == GL_NO_ERROR;
 }
 
+void EGLRenderer::cleanupRenderContext()
+{
+    // Detach the context from the render thread so the destructor can attach it
+    SDL_GL_MakeCurrent(m_Window, nullptr);
+}
+
 void EGLRenderer::renderFrame(AVFrame* frame)
 {
     EGLImage imgs[EGL_MAX_PLANES];
 
-    if (frame == nullptr) {
-        // End of stream - unbind the GL context
-        SDL_GL_MakeCurrent(m_Window, nullptr);
-        return;
-    }
-
     // Attach our GL context to the render thread
     // NB: It should already be current, unless the SDL render event watcher
     // performs a rendering operation (like a viewport update on resize) on

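The EGL renderer is the one backend that actually needed the old nullptr signal: a GL context can only be current on one thread at a time, so the render thread has to detach it before the destructor, which may run on a different thread, can attach it again for teardown. SDL_GL_MakeCurrent() with a null context, as used above, performs that detach. Below is a standalone sketch of the same detach and re-attach pattern; it is independent of this codebase, error handling is omitted, and the window title and thread layout are invented for illustration.

#include <SDL.h>
#include <thread>

int main() {
    SDL_Init(SDL_INIT_VIDEO);
    SDL_Window* window = SDL_CreateWindow("context demo",
                                          SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                                          64, 64,
                                          SDL_WINDOW_OPENGL | SDL_WINDOW_HIDDEN);
    SDL_GLContext ctx = SDL_GL_CreateContext(window); // becomes current on this (main) thread
    SDL_GL_MakeCurrent(window, nullptr);              // detach so another thread may take it

    std::thread render([&]() {
        SDL_GL_MakeCurrent(window, ctx);      // the render thread owns the context while drawing
        /* ...renderFrame() work would happen here... */
        SDL_GL_MakeCurrent(window, nullptr);  // the detach that cleanupRenderContext() performs
    });
    render.join();

    // Because the render thread detached, this thread can attach the context again to destroy it
    SDL_GL_MakeCurrent(window, ctx);
    SDL_GL_DeleteContext(ctx);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}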
View File

@@ -11,6 +11,7 @@ public:
     virtual ~EGLRenderer() override;
     virtual bool initialize(PDECODER_PARAMETERS params) override;
     virtual bool prepareDecoderContext(AVCodecContext* context, AVDictionary** options) override;
+    virtual void cleanupRenderContext() override;
     virtual void renderFrame(AVFrame* frame) override;
     virtual bool testRenderFrame(AVFrame* frame) override;
     virtual void notifyOverlayUpdated(Overlay::OverlayType) override;

View File

@@ -345,11 +345,6 @@ bool MmalRenderer::needsTestFrame()
 
 void MmalRenderer::renderFrame(AVFrame* frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
-
     MMAL_BUFFER_HEADER_T* buffer = (MMAL_BUFFER_HEADER_T*)frame->data[3];
     MMAL_STATUS_T status;
 

View File

@@ -46,8 +46,9 @@ Pacer::~Pacer()
         SDL_WaitThread(m_RenderThread, nullptr);
     }
     else {
-        // Send a null AVFrame to indicate end of stream on the main thread
-        m_VsyncRenderer->renderFrame(nullptr);
+        // Notify the renderer that it is being destroyed soon
+        // NB: This must happen on the same thread that calls renderFrame().
+        m_VsyncRenderer->cleanupRenderContext();
     }
 
     // Delete any remaining unconsumed frames
@@ -110,8 +111,9 @@ int Pacer::renderThread(void* context)
         me->renderLastFrameAndUnlock();
     }
 
-    // Send a null AVFrame to indicate end of stream on the render thread
-    me->m_VsyncRenderer->renderFrame(nullptr);
+    // Notify the renderer that it is being destroyed soon
+    // NB: This must happen on the same thread that calls renderFrame().
+    me->m_VsyncRenderer->cleanupRenderContext();
 
     return 0;
 }

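The point of the two Pacer hunks above is that the new callback fires on whichever thread has been driving renderFrame(): at the end of renderThread() when a dedicated pacing thread exists, or on the main thread in the destructor otherwise. The sketch below is a simplified model of that shutdown ordering, not the real Pacer implementation; the class, function names, and timing values are invented.

#include <SDL.h>
#include <atomic>

// Simplified model: state used by renderFrame() is torn down by the thread that used it.
struct VsyncRendererModel {
    void renderFrame() { /* draw using thread-affine state */ }
    void cleanupRenderContext() { /* release that thread-affine state */ }
};

static std::atomic<bool> g_stopping{false};
static VsyncRendererModel g_renderer;

static int renderThreadModel(void*) {
    while (!g_stopping.load()) {
        g_renderer.renderFrame();
        SDL_Delay(16); // stand-in for waiting on a V-sync source
    }
    // Still on the render thread, after the final renderFrame() call
    g_renderer.cleanupRenderContext();
    return 0;
}

int main() {
    SDL_Thread* thread = SDL_CreateThread(renderThreadModel, "RenderModel", nullptr);
    SDL_Delay(100);                    // pretend the stream ran for a while
    g_stopping.store(true);            // ask the render thread to exit
    SDL_WaitThread(thread, nullptr);   // cleanupRenderContext() already ran on that thread
    return 0;
}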
View File

@@ -102,6 +102,11 @@ public:
     virtual bool prepareDecoderContext(AVCodecContext* context, AVDictionary** options) = 0;
 
     virtual void renderFrame(AVFrame* frame) = 0;
 
+    // Called on the same thread as renderFrame() during destruction of the renderer
+    virtual void cleanupRenderContext() {
+        // Nothing
+    }
+
     virtual bool testRenderFrame(AVFrame*) {
         // If the renderer doesn't provide an explicit test routine,
         // we will always assume that any returned AVFrame can be

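Making cleanupRenderContext() an ordinary virtual with an empty body, rather than pure virtual, means the renderers that only deleted their EOS checks above need no further changes, and callers like Pacer can invoke it unconditionally through the interface. A quick sketch of that idiom; the classes here are hypothetical stand-ins, not code from this repository.

struct AVFrame;  // opaque; the real definition comes from libavutil

class IRendererSketch {
public:
    virtual ~IRendererSketch() {}
    virtual void renderFrame(AVFrame* frame) = 0;
    // Optional hook: the default does nothing, so only renderers with
    // thread-affine state need to override it.
    virtual void cleanupRenderContext() {}
};

class StatelessRenderer : public IRendererSketch {
public:
    void renderFrame(AVFrame*) override { /* draw */ }
    // Inherits the empty cleanupRenderContext(): nothing to release.
};

class ContextBoundRenderer : public IRendererSketch {
public:
    void renderFrame(AVFrame*) override { /* draw with a thread-bound context */ }
    void cleanupRenderContext() override { /* detach that context from this thread */ }
};

// The caller never needs to know which kind it has:
void shutdownOnRenderThread(IRendererSketch* renderer) {
    renderer->cleanupRenderContext();
}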
View File

@@ -344,11 +344,6 @@ void SdlRenderer::renderFrame(AVFrame* frame)
     int err;
     AVFrame* swFrame = nullptr;
 
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
-
     if (frame->hw_frames_ctx != nullptr && frame->format != AV_PIX_FMT_CUDA) {
 #ifdef HAVE_CUDA
 ReadbackRetry:

View File

@@ -351,11 +351,6 @@ int VAAPIRenderer::getDecoderColorspace()
 void
 VAAPIRenderer::renderFrame(AVFrame* frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
-
     VASurfaceID surface = (VASurfaceID)(uintptr_t)frame->data[3];
     AVHWDeviceContext* deviceContext = (AVHWDeviceContext*)m_HwContext->data;
     AVVAAPIDeviceContext* vaDeviceContext = (AVVAAPIDeviceContext*)deviceContext->hwctx;

View File

@@ -476,11 +476,6 @@ void VDPAURenderer::renderOverlay(VdpOutputSurface destination, Overlay::OverlayType type)
 
 void VDPAURenderer::renderFrame(AVFrame* frame)
 {
-    if (frame == nullptr) {
-        // End of stream - nothing to do for us
-        return;
-    }
-
     VdpStatus status;
     VdpVideoSurface videoSurface = (VdpVideoSurface)(uintptr_t)frame->data[3];
 

View File

@@ -161,11 +161,6 @@ public:
     // Caller frees frame after we return
     virtual void renderFrame(AVFrame* frame) override
    {
-        if (frame == nullptr) {
-            // End of stream - nothing to do for us
-            return;
-        }
-
         OSStatus status;
         CVPixelBufferRef pixBuf = reinterpret_cast<CVPixelBufferRef>(frame->data[3]);
 