Fix interlaced output/sw switching

This commit is contained in:
Stenzek 2023-08-07 23:25:54 +10:00
parent febf864220
commit 9a9f03c974
4 changed files with 8 additions and 9 deletions

View File

@@ -197,15 +197,11 @@ u32 GPUTextureBuffer::GetElementSize(Format format)
return element_size[static_cast<u32>(format)];
}
// Old destructor (removed by this commit): closed the shader cache during
// destruction. The TODO below is resolved by this same diff, which moves
// m_shader_cache.Close() into GPUDevice::Destroy() and defaults the destructor.
GPUDevice::~GPUDevice()
{
// TODO: move to Destroy() method
m_shader_cache.Close();
}
// Destructor is now trivial: shader-cache teardown happens in
// GPUDevice::Destroy() (see the m_shader_cache.Close() call added there),
// so destruction itself has nothing left to do.
GPUDevice::~GPUDevice() = default;
RenderAPI GPUDevice::GetPreferredAPI()
{
#ifdef _WIN32___ // TODO remove me
#ifdef _WIN32
return RenderAPI::D3D11;
#else
return RenderAPI::Metal;
@@ -270,6 +266,7 @@ void GPUDevice::Destroy()
if (HasSurface())
DestroySurface();
DestroyResources();
m_shader_cache.Close();
DestroyDevice();
}

View File

@@ -560,8 +560,8 @@ void GPU_HW::ClearFramebuffer()
{
g_gpu_device->ClearRenderTarget(m_vram_texture.get(), 0);
g_gpu_device->ClearDepth(m_vram_depth_texture.get(), m_pgxp_depth_buffer ? 1.0f : 0.0f);
g_gpu_device->ClearRenderTarget(m_display_texture.get(), 0);
ClearVRAMDirtyRectangle();
g_gpu_device->ClearRenderTarget(m_display_texture.get(), 0);
m_last_depth_z = 1.0f;
}
@@ -2411,9 +2411,10 @@ void GPU_HW::UpdateDisplay()
if (interlaced == InterlacedRenderMode::None)
g_gpu_device->InvalidateRenderTarget(m_display_texture.get());
g_gpu_device->SetFramebuffer(m_display_framebuffer.get());
g_gpu_device->SetPipeline(
m_display_pipelines[BoolToUInt8(m_GPUSTAT.display_area_color_depth_24)][static_cast<u8>(interlaced)].get());
g_gpu_device->SetFramebuffer(m_display_framebuffer.get());
g_gpu_device->SetTextureSampler(0, m_vram_texture.get(), g_gpu_device->GetNearestSampler());
const u32 reinterpret_field_offset = (interlaced != InterlacedRenderMode::None) ? GetInterlacedDisplayField() : 0;
const u32 reinterpret_start_x = m_crtc_state.regs.X * resolution_scale;

View File

@@ -1635,7 +1635,7 @@ bool System::CreateGPU(GPURenderer renderer)
{
const RenderAPI api = Settings::GetRenderAPIForRenderer(renderer);
if (!g_gpu_device || g_gpu_device->GetRenderAPI() != api)
if (!g_gpu_device || (renderer != GPURenderer::Software && g_gpu_device->GetRenderAPI() != api))
{
if (g_gpu_device)
{

View File

@@ -237,6 +237,7 @@ void ImGuiManager::DrawPerformanceOverlay()
if (g_settings.display_show_resolution)
{
// TODO: this seems wrong?
const auto [effective_width, effective_height] = g_gpu->GetEffectiveDisplayResolution();
const bool interlaced = g_gpu->IsInterlacedDisplayEnabled();
const bool pal = g_gpu->IsInPALMode();