mirror of https://github.com/moonlight-stream/moonlight-qt.git (synced 2025-07-01 15:26:09 +00:00)

Use ComPtr for lifetime management in D3D11VA

parent 0bb0d27d64
commit 9e811f54f1
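The change below replaces manually Release()'d raw COM pointers with Microsoft::WRL::ComPtr, which drops its reference automatically when it is Reset() or goes out of scope. As a reminder of the idioms the diff relies on (Get(), GetAddressOf(), As(), Reset()), here is a minimal sketch; the function and the device-creation arguments are illustrative only and are not part of the commit:

#include <d3d11.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

void comPtrIdioms()
{
    ComPtr<ID3D11Device> device;
    ComPtr<ID3D11DeviceContext> context;

    // operator& / GetAddressOf() expose the raw ID3D11Device** that creation APIs expect
    D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, 0,
                      nullptr, 0, D3D11_SDK_VERSION,
                      &device, nullptr, &context);

    // Get() borrows the raw pointer without touching the reference count
    ID3D11Device* raw = device.Get();
    (void)raw;

    // As() wraps QueryInterface() and manages the returned reference
    ComPtr<ID3D11VideoDevice> videoDevice;
    device.As(&videoDevice);

    // Reset() releases this reference now; otherwise the destructor releases it
    videoDevice.Reset();
}   // device and context are released automatically here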
@@ -13,11 +13,11 @@
#include <dwmapi.h>
using Microsoft::WRL::ComPtr;
// Custom decoder GUID for Intel HEVC 444
DEFINE_GUID(D3D11_DECODER_PROFILE_HEVC_VLD_Main444_10_Intel,0x6a6a81ba,0x912a,0x485d,0xb5,0x7f,0xcc,0xd2,0xd3,0x7b,0x8d,0x94);
#define SAFE_COM_RELEASE(x) if (x) { (x)->Release(); }
typedef struct _VERTEX
{
float x, y;
@@ -89,27 +89,14 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass)
: m_DecoderSelectionPass(decoderSelectionPass),
m_DevicesWithFL11Support(0),
m_DevicesWithCodecSupport(0),
m_Factory(nullptr),
m_Device(nullptr),
m_SwapChain(nullptr),
m_DeviceContext(nullptr),
m_RenderTargetView(nullptr),
m_LastColorSpace(-1),
m_LastFullRange(false),
m_LastColorTrc(AVCOL_TRC_UNSPECIFIED),
m_AllowTearing(false),
m_VideoVertexBuffer(nullptr),
m_VideoTexture(nullptr),
m_OverlayLock(0),
m_OverlayPixelShader(nullptr),
m_HwDeviceContext(nullptr),
m_HwFramesContext(nullptr)
{
RtlZeroMemory(m_OverlayVertexBuffers, sizeof(m_OverlayVertexBuffers));
RtlZeroMemory(m_OverlayTextures, sizeof(m_OverlayTextures));
RtlZeroMemory(m_OverlayTextureResourceViews, sizeof(m_OverlayTextureResourceViews));
RtlZeroMemory(m_VideoTextureResourceViews, sizeof(m_VideoTextureResourceViews));
m_ContextLock = SDL_CreateMutex();
DwmEnableMMCSS(TRUE);
@@ -121,38 +108,38 @@ D3D11VARenderer::~D3D11VARenderer()
SDL_DestroyMutex(m_ContextLock);
SAFE_COM_RELEASE(m_VideoVertexBuffer);
m_VideoVertexBuffer.Reset();
for (auto shader : m_VideoPixelShaders) {
SAFE_COM_RELEASE(shader);
shader.Reset();
}
for (int i = 0; i < ARRAYSIZE(m_VideoTextureResourceViews); i++) {
SAFE_COM_RELEASE(m_VideoTextureResourceViews[i][0]);
SAFE_COM_RELEASE(m_VideoTextureResourceViews[i][1]);
for (int i = 0; i < m_VideoTextureResourceViews.size(); i++) {
for (int j = 0; j < m_VideoTextureResourceViews[i].size(); j++) {
m_VideoTextureResourceViews[i][j].Reset();
}
}
SAFE_COM_RELEASE(m_VideoTexture);
m_VideoTexture.Reset();
for (int i = 0; i < ARRAYSIZE(m_OverlayVertexBuffers); i++) {
SAFE_COM_RELEASE(m_OverlayVertexBuffers[i]);
for (int i = 0; i < m_OverlayVertexBuffers.size(); i++) {
m_OverlayVertexBuffers[i].Reset();
}
for (int i = 0; i < ARRAYSIZE(m_OverlayTextureResourceViews); i++) {
SAFE_COM_RELEASE(m_OverlayTextureResourceViews[i]);
for (int i = 0; i < m_OverlayTextureResourceViews.size(); i++) {
m_OverlayTextureResourceViews[i].Reset();
}
for (int i = 0; i < ARRAYSIZE(m_OverlayTextures); i++) {
SAFE_COM_RELEASE(m_OverlayTextures[i]);
for (int i = 0; i < m_OverlayTextures.size(); i++) {
m_OverlayTextures[i].Reset();
}
SAFE_COM_RELEASE(m_OverlayPixelShader);
m_OverlayPixelShader.Reset();
SAFE_COM_RELEASE(m_RenderTargetView);
SAFE_COM_RELEASE(m_SwapChain);
m_RenderTargetView.Reset();
m_SwapChain.Reset();
if (m_HwFramesContext != nullptr) {
av_buffer_unref(&m_HwFramesContext);
}
av_buffer_unref(&m_HwDeviceContext);
// Force destruction of the swapchain immediately
if (m_DeviceContext != nullptr) {
@@ -160,29 +147,22 @@ D3D11VARenderer::~D3D11VARenderer()
m_DeviceContext->Flush();
}
if (m_HwDeviceContext != nullptr) {
// This will release m_Device and m_DeviceContext too
av_buffer_unref(&m_HwDeviceContext);
}
else {
SAFE_COM_RELEASE(m_Device);
SAFE_COM_RELEASE(m_DeviceContext);
}
SAFE_COM_RELEASE(m_Factory);
m_Device.Reset();
m_DeviceContext.Reset();
m_Factory.Reset();
}
bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound)
{
const D3D_FEATURE_LEVEL supportedFeatureLevels[] = { D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_11_1 };
bool success = false;
IDXGIAdapter1* adapter = nullptr;
ComPtr<IDXGIAdapter1> adapter;
DXGI_ADAPTER_DESC1 adapterDesc;
D3D_FEATURE_LEVEL featureLevel;
HRESULT hr;
SDL_assert(m_Device == nullptr);
SDL_assert(m_DeviceContext == nullptr);
SDL_assert(!m_Device);
SDL_assert(!m_DeviceContext);
hr = m_Factory->EnumAdapters1(adapterIndex, &adapter);
if (hr == DXGI_ERROR_NOT_FOUND) {
@@ -230,7 +210,7 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter
adapterDesc.DeviceId,
m_BindDecoderOutputTextures ? "bind" : "copy");
hr = D3D11CreateDevice(adapter,
hr = D3D11CreateDevice(adapter.Get(),
D3D_DRIVER_TYPE_UNKNOWN,
nullptr,
D3D11_CREATE_DEVICE_VIDEO_SUPPORT
@@ -256,12 +236,9 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter
m_DevicesWithFL11Support++;
}
if (!checkDecoderSupport(adapter)) {
m_DeviceContext->Release();
m_DeviceContext = nullptr;
m_Device->Release();
m_Device = nullptr;
if (!checkDecoderSupport(adapter.Get())) {
m_DeviceContext.Reset();
m_Device.Reset();
goto Exit;
}
else {
@@ -273,9 +250,8 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter
Exit:
if (adapterNotFound != nullptr) {
*adapterNotFound = (adapter == nullptr);
*adapterNotFound = !adapter;
}
SAFE_COM_RELEASE(adapter);
return success;
}
@@ -334,8 +310,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params)
}
if (adapterNotFound) {
SDL_assert(m_Device == nullptr);
SDL_assert(m_DeviceContext == nullptr);
SDL_assert(!m_Device);
SDL_assert(!m_DeviceContext);
return false;
}
}
@@ -423,8 +399,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params)
// Always use windowed or borderless windowed mode.. SDL does mode-setting for us in
// full-screen exclusive mode (SDL_WINDOW_FULLSCREEN), so this actually works out okay.
IDXGISwapChain1* swapChain;
hr = m_Factory->CreateSwapChainForHwnd(m_Device,
ComPtr<IDXGISwapChain1> swapChain;
hr = m_Factory->CreateSwapChainForHwnd(m_Device.Get(),
info.info.win.window,
&swapChainDesc,
nullptr,
@@ -438,9 +414,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params)
return false;
}
hr = swapChain->QueryInterface(__uuidof(IDXGISwapChain4), (void**)&m_SwapChain);
swapChain->Release();
hr = swapChain.As(&m_SwapChain);
if (FAILED(hr)) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"IDXGISwapChain::QueryInterface(IDXGISwapChain4) failed: %x",
@@ -478,9 +452,9 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params)
AVHWDeviceContext* deviceContext = (AVHWDeviceContext*)m_HwDeviceContext->data;
AVD3D11VADeviceContext* d3d11vaDeviceContext = (AVD3D11VADeviceContext*)deviceContext->hwctx;
// AVHWDeviceContext takes ownership of these objects
d3d11vaDeviceContext->device = m_Device;
d3d11vaDeviceContext->device_context = m_DeviceContext;
// FFmpeg will take ownership of these pointers, so we use CopyTo() to bump the ref count
m_Device.CopyTo(&d3d11vaDeviceContext->device);
m_DeviceContext.CopyTo(&d3d11vaDeviceContext->device_context);
// Set lock functions that we will use to synchronize with FFmpeg's usage of our device context
d3d11vaDeviceContext->lock = lockContext;
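A note on the CopyTo() calls above: Get() only borrows the raw pointer, while ComPtr::CopyTo() calls AddRef() before writing to the out parameter, so FFmpeg's AVHWDeviceContext ends up holding its own reference alongside the renderer's. A small illustrative sketch (the variable names are hypothetical, not from the codebase):

ComPtr<ID3D11Device> device;            // assume this was created earlier

ID3D11Device* owned = nullptr;
device.CopyTo(&owned);                  // AddRef()s; 'owned' must eventually be Release()d by its new owner

ID3D11Device* borrowed = device.Get();  // no AddRef(); only valid while 'device' keeps its reference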
@@ -585,11 +559,11 @@ void D3D11VARenderer::renderFrame(AVFrame* frame)
// Clear the back buffer
const float clearColor[4] = {0.0f, 0.0f, 0.0f, 0.0f};
m_DeviceContext->ClearRenderTargetView(m_RenderTargetView, clearColor);
m_DeviceContext->ClearRenderTargetView(m_RenderTargetView.Get(), clearColor);
// Bind the back buffer. This needs to be done each time,
// because the render target view will be unbound by Present().
m_DeviceContext->OMSetRenderTargets(1, &m_RenderTargetView, nullptr);
m_DeviceContext->OMSetRenderTargets(1, m_RenderTargetView.GetAddressOf(), nullptr);
// Render our video frame with the aspect-ratio adjusted viewport
renderVideo(frame);
@@ -670,39 +644,32 @@ void D3D11VARenderer::renderOverlay(Overlay::OverlayType type)
return;
}
ID3D11Texture2D* overlayTexture = m_OverlayTextures[type];
ID3D11Buffer* overlayVertexBuffer = m_OverlayVertexBuffers[type];
ID3D11ShaderResourceView* overlayTextureResourceView = m_OverlayTextureResourceViews[type];
if (overlayTexture == nullptr) {
// Reference these objects so they don't immediately go away if the
// overlay update thread tries to release them.
ComPtr<ID3D11Texture2D> overlayTexture = m_OverlayTextures[type];
ComPtr<ID3D11Buffer> overlayVertexBuffer = m_OverlayVertexBuffers[type];
ComPtr<ID3D11ShaderResourceView> overlayTextureResourceView = m_OverlayTextureResourceViews[type];
SDL_AtomicUnlock(&m_OverlayLock);
if (!overlayTexture) {
return;
}
// Reference these objects so they don't immediately go away if the
// overlay update thread tries to release them.
SDL_assert(overlayVertexBuffer != nullptr);
overlayTexture->AddRef();
overlayVertexBuffer->AddRef();
overlayTextureResourceView->AddRef();
SDL_AtomicUnlock(&m_OverlayLock);
// If there was a texture, there must also be a vertex buffer and SRV
SDL_assert(overlayVertexBuffer);
SDL_assert(overlayTextureResourceView);
// Bind vertex buffer
UINT stride = sizeof(VERTEX);
UINT offset = 0;
m_DeviceContext->IASetVertexBuffers(0, 1, &overlayVertexBuffer, &stride, &offset);
m_DeviceContext->IASetVertexBuffers(0, 1, overlayVertexBuffer.GetAddressOf(), &stride, &offset);
// Bind pixel shader and resources
m_DeviceContext->PSSetShader(m_OverlayPixelShader, nullptr, 0);
m_DeviceContext->PSSetShaderResources(0, 1, &overlayTextureResourceView);
m_DeviceContext->PSSetShader(m_OverlayPixelShader.Get(), nullptr, 0);
m_DeviceContext->PSSetShaderResources(0, 1, overlayTextureResourceView.GetAddressOf());
// Draw the overlay
m_DeviceContext->DrawIndexed(6, 0, 0);
overlayTextureResourceView->Release();
overlayTexture->Release();
overlayVertexBuffer->Release();
}
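The rewritten renderOverlay() above copies the ComPtrs while holding m_OverlayLock, so the overlay resources stay alive even if notifyOverlayUpdated() swaps them out from another thread: copying a ComPtr AddRef()s, and the local copy drops that reference when it goes out of scope. A minimal sketch of the same pattern, assuming a single shared texture (names are illustrative, not from the codebase):

#include <SDL_atomic.h>
#include <d3d11.h>
#include <wrl/client.h>
#include <utility>

using Microsoft::WRL::ComPtr;

static SDL_SpinLock s_Lock;
static ComPtr<ID3D11Texture2D> s_SharedTexture;       // written by the update thread

void renderSketch()
{
    SDL_AtomicLock(&s_Lock);
    ComPtr<ID3D11Texture2D> local = s_SharedTexture;  // AddRef() taken under the lock
    SDL_AtomicUnlock(&s_Lock);

    if (!local) {
        return;
    }
    // ... use local.Get() safely; the extra reference is dropped when 'local' destructs ...
}

void updateSketch(ComPtr<ID3D11Texture2D> replacement)
{
    SDL_AtomicLock(&s_Lock);
    ComPtr<ID3D11Texture2D> old = std::move(s_SharedTexture);  // detach without AddRef()
    s_SharedTexture = std::move(replacement);
    SDL_AtomicUnlock(&s_Lock);
    // 'old' releases the previous texture here, outside the lock
}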
void D3D11VARenderer::bindColorConversion(AVFrame* frame)
@@ -713,10 +680,10 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame)
// We have purpose-built shaders for the common Rec 601 (SDR) and Rec 2020 (HDR) YUV 4:2:0 cases
if (!yuv444 && !fullRange && colorspace == COLORSPACE_REC_601) {
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::BT_601_LIMITED_YUV_420], nullptr, 0);
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::BT_601_LIMITED_YUV_420].Get(), nullptr, 0);
}
else if (!yuv444 && !fullRange && colorspace == COLORSPACE_REC_2020) {
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::BT_2020_LIMITED_YUV_420], nullptr, 0);
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::BT_2020_LIMITED_YUV_420].Get(), nullptr, 0);
}
else {
if (yuv444) {
@@ -724,10 +691,10 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame)
switch (m_TextureFormat)
{
case DXGI_FORMAT_AYUV:
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::GENERIC_AYUV], nullptr, 0);
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::GENERIC_AYUV].Get(), nullptr, 0);
break;
case DXGI_FORMAT_Y410:
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::GENERIC_Y410], nullptr, 0);
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::GENERIC_Y410].Get(), nullptr, 0);
break;
default:
SDL_assert(false);
@@ -735,7 +702,7 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame)
}
else {
// We'll need to use the generic 4:2:0 shader for this colorspace and color range combo
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::GENERIC_YUV_420], nullptr, 0);
m_DeviceContext->PSSetShader(m_VideoPixelShaders[PixelShaders::GENERIC_YUV_420].Get(), nullptr, 0);
}
// If nothing has changed since last frame, we're done
@@ -791,11 +758,10 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame)
D3D11_SUBRESOURCE_DATA constData = {};
constData.pSysMem = &constBuf;
ID3D11Buffer* constantBuffer;
ComPtr<ID3D11Buffer> constantBuffer;
HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, &constantBuffer);
if (SUCCEEDED(hr)) {
m_DeviceContext->PSSetConstantBuffers(1, 1, &constantBuffer);
constantBuffer->Release();
m_DeviceContext->PSSetConstantBuffers(1, 1, constantBuffer.GetAddressOf());
}
else {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
@@ -814,15 +780,15 @@ void D3D11VARenderer::renderVideo(AVFrame* frame)
// Bind video rendering vertex buffer
UINT stride = sizeof(VERTEX);
UINT offset = 0;
m_DeviceContext->IASetVertexBuffers(0, 1, &m_VideoVertexBuffer, &stride, &offset);
m_DeviceContext->IASetVertexBuffers(0, 1, m_VideoVertexBuffer.GetAddressOf(), &stride, &offset);
UINT srvIndex;
if (m_BindDecoderOutputTextures) {
// Our indexing logic depends on a direct mapping into m_VideoTextureResourceViews
// based on the texture index provided by FFmpeg.
srvIndex = (uintptr_t)frame->data[1];
SDL_assert(srvIndex < DECODER_BUFFER_POOL_SIZE);
if (srvIndex >= DECODER_BUFFER_POOL_SIZE) {
SDL_assert(srvIndex < m_VideoTextureResourceViews.size());
if (srvIndex >= m_VideoTextureResourceViews.size()) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"Unexpected texture index: %u",
srvIndex);
@@ -838,7 +804,7 @@ void D3D11VARenderer::renderVideo(AVFrame* frame)
srcBox.bottom = m_DecoderParams.height;
srcBox.front = 0;
srcBox.back = 1;
m_DeviceContext->CopySubresourceRegion(m_VideoTexture, 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox);
m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox);
// SRV 0 is always mapped to the video texture
srvIndex = 0;
@@ -848,7 +814,8 @@ void D3D11VARenderer::renderVideo(AVFrame* frame)
bindColorConversion(frame);
// Bind SRVs for this frame
m_DeviceContext->PSSetShaderResources(0, 2, m_VideoTextureResourceViews[srvIndex]);
ID3D11ShaderResourceView* frameSrvs[] = { m_VideoTextureResourceViews[srvIndex][0].Get(), m_VideoTextureResourceViews[srvIndex][1].Get() };
m_DeviceContext->PSSetShaderResources(0, 2, frameSrvs);
// Draw the video
m_DeviceContext->DrawIndexed(6, 0, 0);
@@ -872,20 +839,11 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type)
}
SDL_AtomicLock(&m_OverlayLock);
ID3D11Texture2D* oldTexture = m_OverlayTextures[type];
m_OverlayTextures[type] = nullptr;
ID3D11Buffer* oldVertexBuffer = m_OverlayVertexBuffers[type];
m_OverlayVertexBuffers[type] = nullptr;
ID3D11ShaderResourceView* oldTextureResourceView = m_OverlayTextureResourceViews[type];
m_OverlayTextureResourceViews[type] = nullptr;
ComPtr<ID3D11Texture2D> oldTexture = std::move(m_OverlayTextures[type]);
ComPtr<ID3D11Buffer> oldVertexBuffer = std::move(m_OverlayVertexBuffers[type]);
ComPtr<ID3D11ShaderResourceView> oldTextureResourceView = std::move(m_OverlayTextureResourceViews[type]);
SDL_AtomicUnlock(&m_OverlayLock);
SAFE_COM_RELEASE(oldTextureResourceView);
SAFE_COM_RELEASE(oldTexture);
SAFE_COM_RELEASE(oldVertexBuffer);
// If the overlay is disabled, we're done
if (!overlayEnabled) {
SDL_FreeSurface(newSurface);
@@ -913,7 +871,7 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type)
texData.pSysMem = newSurface->pixels;
texData.SysMemPitch = newSurface->pitch;
ID3D11Texture2D* newTexture;
ComPtr<ID3D11Texture2D> newTexture;
hr = m_Device->CreateTexture2D(&texDesc, &texData, &newTexture);
if (FAILED(hr)) {
SDL_FreeSurface(newSurface);
@@ -923,10 +881,9 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type)
return;
}
ID3D11ShaderResourceView* newTextureResourceView = nullptr;
hr = m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture, nullptr, &newTextureResourceView);
ComPtr<ID3D11ShaderResourceView> newTextureResourceView;
hr = m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture.Get(), nullptr, &newTextureResourceView);
if (FAILED(hr)) {
SAFE_COM_RELEASE(newTexture);
SDL_FreeSurface(newSurface);
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"ID3D11Device::CreateShaderResourceView() failed: %x",
@@ -976,11 +933,9 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type)
D3D11_SUBRESOURCE_DATA vbData = {};
vbData.pSysMem = verts;
ID3D11Buffer* newVertexBuffer;
ComPtr<ID3D11Buffer> newVertexBuffer;
hr = m_Device->CreateBuffer(&vbDesc, &vbData, &newVertexBuffer);
if (FAILED(hr)) {
SAFE_COM_RELEASE(newTextureResourceView);
SAFE_COM_RELEASE(newTexture);
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"ID3D11Device::CreateBuffer() failed: %x",
hr);
@@ -988,16 +943,16 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type)
}
SDL_AtomicLock(&m_OverlayLock);
m_OverlayVertexBuffers[type] = newVertexBuffer;
m_OverlayTextures[type] = newTexture;
m_OverlayTextureResourceViews[type] = newTextureResourceView;
m_OverlayVertexBuffers[type] = std::move(newVertexBuffer);
m_OverlayTextures[type] = std::move(newTexture);
m_OverlayTextureResourceViews[type] = std::move(newTextureResourceView);
SDL_AtomicUnlock(&m_OverlayLock);
}
bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
{
HRESULT hr;
ID3D11VideoDevice* videoDevice;
Microsoft::WRL::ComPtr<ID3D11VideoDevice> videoDevice;
DXGI_ADAPTER_DESC adapterDesc;
hr = adapter->GetDesc(&adapterDesc);
@@ -1009,7 +964,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
}
// Derive a ID3D11VideoDevice from our ID3D11Device.
hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&videoDevice);
hr = m_Device.As(&videoDevice);
if (FAILED(hr)) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"ID3D11Device::QueryInterface(ID3D11VideoDevice) failed: %x",
@@ -1025,13 +980,11 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_H264_VLD_NOFGT, DXGI_FORMAT_NV12, &supported))) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support H.264 decoding");
videoDevice->Release();
return false;
}
else if (!supported) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support H.264 decoding to NV12 format");
videoDevice->Release();
return false;
}
break;
@@ -1040,13 +993,11 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN, DXGI_FORMAT_NV12, &supported))) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support HEVC decoding");
videoDevice->Release();
return false;
}
else if (!supported) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support HEVC decoding to NV12 format");
videoDevice->Release();
return false;
}
break;
@@ -1055,13 +1006,11 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10, DXGI_FORMAT_P010, &supported))) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support HEVC Main10 decoding");
videoDevice->Release();
return false;
}
else if (!supported) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support HEVC Main10 decoding to P010 format");
videoDevice->Release();
return false;
}
break;
@@ -1070,13 +1019,11 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_NV12, &supported))) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support AV1 decoding");
videoDevice->Release();
return false;
}
else if (!supported) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support AV1 decoding to NV12 format");
videoDevice->Release();
return false;
}
break;
@@ -1085,13 +1032,11 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_P010, &supported))) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support AV1 Main10 decoding");
videoDevice->Release();
return false;
}
else if (!supported) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support AV1 Main10 decoding to P010 format");
videoDevice->Release();
return false;
}
break;
@@ -1099,19 +1044,16 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
case VIDEO_FORMAT_H265_REXT8_444:
if (adapterDesc.VendorId != 0x8086) {
// This custom D3D11VA profile is only supported on Intel GPUs
videoDevice->Release();
return false;
}
else if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_Main444_10_Intel, DXGI_FORMAT_AYUV, &supported))) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support HEVC Main 444 8-bit decoding");
videoDevice->Release();
return false;
}
else if (!supported) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support HEVC Main 444 8-bit decoding to AYUV format");
videoDevice->Release();
return false;
}
break;
@@ -1119,31 +1061,25 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
case VIDEO_FORMAT_H265_REXT10_444:
if (adapterDesc.VendorId != 0x8086) {
// This custom D3D11VA profile is only supported on Intel GPUs
videoDevice->Release();
return false;
}
else if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_Main444_10_Intel, DXGI_FORMAT_Y410, &supported))) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support HEVC Main 444 10-bit decoding");
videoDevice->Release();
return false;
}
else if (!supported) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"GPU doesn't support HEVC Main 444 10-bit decoding to Y410 format");
videoDevice->Release();
return false;
}
break;
default:
SDL_assert(false);
videoDevice->Release();
return false;
}
videoDevice->Release();
if (DXUtil::isFormatHybridDecodedByHardware(m_DecoderParams.videoFormat, adapterDesc.VendorId, adapterDesc.DeviceId)) {
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"GPU decoding for format %x is blocked due to hardware limitations",
@@ -1234,11 +1170,10 @@ bool D3D11VARenderer::setupRenderingResources()
{
QByteArray vertexShaderBytecode = Path::readDataFile("d3d11_vertex.fxc");
ID3D11VertexShader* vertexShader;
ComPtr<ID3D11VertexShader> vertexShader;
hr = m_Device->CreateVertexShader(vertexShaderBytecode.constData(), vertexShaderBytecode.length(), nullptr, &vertexShader);
if (SUCCEEDED(hr)) {
m_DeviceContext->VSSetShader(vertexShader, nullptr, 0);
vertexShader->Release();
m_DeviceContext->VSSetShader(vertexShader.Get(), nullptr, 0);
}
else {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
@@ -1252,11 +1187,10 @@ bool D3D11VARenderer::setupRenderingResources()
{ "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
ID3D11InputLayout* inputLayout;
ComPtr<ID3D11InputLayout> inputLayout;
hr = m_Device->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), vertexShaderBytecode.constData(), vertexShaderBytecode.length(), &inputLayout);
if (SUCCEEDED(hr)) {
m_DeviceContext->IASetInputLayout(inputLayout);
inputLayout->Release();
m_DeviceContext->IASetInputLayout(inputLayout.Get());
}
else {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
@@ -1304,11 +1238,10 @@ bool D3D11VARenderer::setupRenderingResources()
samplerDesc.MinLOD = 0.0f;
samplerDesc.MaxLOD = D3D11_FLOAT32_MAX;
ID3D11SamplerState* sampler;
ComPtr<ID3D11SamplerState> sampler;
hr = m_Device->CreateSamplerState(&samplerDesc, &sampler);
if (SUCCEEDED(hr)) {
m_DeviceContext->PSSetSamplers(0, 1, &sampler);
sampler->Release();
m_DeviceContext->PSSetSamplers(0, 1, sampler.GetAddressOf());
}
else {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
@@ -1320,7 +1253,7 @@ bool D3D11VARenderer::setupRenderingResources()
// Create our render target view
{
ID3D11Resource* backBufferResource;
ComPtr<ID3D11Resource> backBufferResource;
hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)&backBufferResource);
if (FAILED(hr)) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
@@ -1329,8 +1262,7 @@ bool D3D11VARenderer::setupRenderingResources()
return false;
}
hr = m_Device->CreateRenderTargetView(backBufferResource, nullptr, &m_RenderTargetView);
backBufferResource->Release();
hr = m_Device->CreateRenderTargetView(backBufferResource.Get(), nullptr, &m_RenderTargetView);
if (FAILED(hr)) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"ID3D11Device::CreateRenderTargetView() failed: %x",
@@ -1354,11 +1286,10 @@ bool D3D11VARenderer::setupRenderingResources()
indexBufferData.pSysMem = indexes;
indexBufferData.SysMemPitch = sizeof(int);
ID3D11Buffer* indexBuffer;
ComPtr<ID3D11Buffer> indexBuffer;
hr = m_Device->CreateBuffer(&indexBufferDesc, &indexBufferData, &indexBuffer);
if (SUCCEEDED(hr)) {
m_DeviceContext->IASetIndexBuffer(indexBuffer, DXGI_FORMAT_R32_UINT, 0);
indexBuffer->Release();
m_DeviceContext->IASetIndexBuffer(indexBuffer.Get(), DXGI_FORMAT_R32_UINT, 0);
}
else {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
@@ -1436,11 +1367,10 @@ bool D3D11VARenderer::setupRenderingResources()
D3D11_SUBRESOURCE_DATA constData = {};
constData.pSysMem = chromaUVMax;
ID3D11Buffer* constantBuffer;
ComPtr<ID3D11Buffer> constantBuffer;
HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, &constantBuffer);
if (SUCCEEDED(hr)) {
m_DeviceContext->PSSetConstantBuffers(0, 1, &constantBuffer);
constantBuffer->Release();
m_DeviceContext->PSSetConstantBuffers(0, 1, constantBuffer.GetAddressOf());
}
else {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
@@ -1464,11 +1394,10 @@ bool D3D11VARenderer::setupRenderingResources()
blendDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
blendDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
ID3D11BlendState* blendState;
ComPtr<ID3D11BlendState> blendState;
hr = m_Device->CreateBlendState(&blendDesc, &blendState);
if (SUCCEEDED(hr)) {
m_DeviceContext->OMSetBlendState(blendState, nullptr, 0xffffffff);
blendState->Release();
m_DeviceContext->OMSetBlendState(blendState.Get(), nullptr, 0xffffffff);
}
else {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
@@ -1531,7 +1460,6 @@ bool D3D11VARenderer::setupVideoTexture()
hr = m_Device->CreateTexture2D(&texDesc, nullptr, &m_VideoTexture);
if (FAILED(hr)) {
m_VideoTexture = nullptr;
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"ID3D11Device::CreateTexture2D() failed: %x",
hr);
@@ -1545,12 +1473,11 @@ bool D3D11VARenderer::setupVideoTexture()
srvDesc.Texture2D.MipLevels = 1;
int srvIndex = 0;
for (DXGI_FORMAT srvFormat : getVideoTextureSRVFormats()) {
SDL_assert(srvIndex < 2);
SDL_assert(srvIndex < m_VideoTextureResourceViews[0].size());
srvDesc.Format = srvFormat;
hr = m_Device->CreateShaderResourceView(m_VideoTexture, &srvDesc, &m_VideoTextureResourceViews[0][srvIndex]);
hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[0][srvIndex]);
if (FAILED(hr)) {
m_VideoTextureResourceViews[0][srvIndex] = nullptr;
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"ID3D11Device::CreateShaderResourceView() failed: %x",
hr);
@@ -1574,7 +1501,7 @@ bool D3D11VARenderer::setupTexturePoolViews(AVD3D11VAFramesContext* frameContext
srvDesc.Texture2DArray.ArraySize = 1;
// Create luminance and chrominance SRVs for each texture in the pool
for (int i = 0; i < DECODER_BUFFER_POOL_SIZE; i++) {
for (int i = 0; i < m_VideoTextureResourceViews.size(); i++) {
HRESULT hr;
// Our rendering logic depends on the texture index working to map into our SRV array
@@ -1584,14 +1511,13 @@ bool D3D11VARenderer::setupTexturePoolViews(AVD3D11VAFramesContext* frameContext
int srvIndex = 0;
for (DXGI_FORMAT srvFormat : getVideoTextureSRVFormats()) {
SDL_assert(srvIndex < 2);
SDL_assert(srvIndex < m_VideoTextureResourceViews[i].size());
srvDesc.Format = srvFormat;
hr = m_Device->CreateShaderResourceView(frameContext->texture_infos[i].texture,
&srvDesc,
&m_VideoTextureResourceViews[i][srvIndex]);
if (FAILED(hr)) {
m_VideoTextureResourceViews[i][srvIndex] = nullptr;
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
"ID3D11Device::CreateShaderResourceView() failed: %x",
hr);
@@ -9,6 +9,8 @@ extern "C" {
#include <libavutil/hwcontext_d3d11va.h>
}
#include <wrl/client.h>
class D3D11VARenderer : public IFFmpegRenderer
{
public:
@@ -51,11 +53,11 @@ private:
int m_DevicesWithFL11Support;
int m_DevicesWithCodecSupport;
IDXGIFactory5* m_Factory;
ID3D11Device* m_Device;
IDXGISwapChain4* m_SwapChain;
ID3D11DeviceContext* m_DeviceContext;
ID3D11RenderTargetView* m_RenderTargetView;
Microsoft::WRL::ComPtr<IDXGIFactory5> m_Factory;
Microsoft::WRL::ComPtr<ID3D11Device> m_Device;
Microsoft::WRL::ComPtr<IDXGISwapChain4> m_SwapChain;
Microsoft::WRL::ComPtr<ID3D11DeviceContext> m_DeviceContext;
Microsoft::WRL::ComPtr<ID3D11RenderTargetView> m_RenderTargetView;
SDL_mutex* m_ContextLock;
bool m_BindDecoderOutputTextures;
@@ -70,21 +72,21 @@ private:
bool m_AllowTearing;
std::array<ID3D11PixelShader*, PixelShaders::_COUNT> m_VideoPixelShaders;
ID3D11Buffer* m_VideoVertexBuffer;
std::array<Microsoft::WRL::ComPtr<ID3D11PixelShader>, PixelShaders::_COUNT> m_VideoPixelShaders;
Microsoft::WRL::ComPtr<ID3D11Buffer> m_VideoVertexBuffer;
// Only valid if !m_BindDecoderOutputTextures
ID3D11Texture2D* m_VideoTexture;
Microsoft::WRL::ComPtr<ID3D11Texture2D> m_VideoTexture;
// Only index 0 is valid if !m_BindDecoderOutputTextures
#define DECODER_BUFFER_POOL_SIZE 17
ID3D11ShaderResourceView* m_VideoTextureResourceViews[DECODER_BUFFER_POOL_SIZE][2];
std::array<std::array<Microsoft::WRL::ComPtr<ID3D11ShaderResourceView>, 2>, DECODER_BUFFER_POOL_SIZE> m_VideoTextureResourceViews;
SDL_SpinLock m_OverlayLock;
ID3D11Buffer* m_OverlayVertexBuffers[Overlay::OverlayMax];
ID3D11Texture2D* m_OverlayTextures[Overlay::OverlayMax];
ID3D11ShaderResourceView* m_OverlayTextureResourceViews[Overlay::OverlayMax];
ID3D11PixelShader* m_OverlayPixelShader;
std::array<Microsoft::WRL::ComPtr<ID3D11Buffer>, Overlay::OverlayMax> m_OverlayVertexBuffers;
std::array<Microsoft::WRL::ComPtr<ID3D11Texture2D>, Overlay::OverlayMax> m_OverlayTextures;
std::array<Microsoft::WRL::ComPtr<ID3D11ShaderResourceView>, Overlay::OverlayMax> m_OverlayTextureResourceViews;
Microsoft::WRL::ComPtr<ID3D11PixelShader> m_OverlayPixelShader;
AVBufferRef* m_HwDeviceContext;
AVBufferRef* m_HwFramesContext;