System: Set GPU timing enable on init/destroy

It's really unrelated to the guest GPU.
Stenzek 2024-09-21 16:13:12 +10:00
parent e7aa0495ab
commit e617f17294
2 changed files with 7 additions and 7 deletions

@@ -80,9 +80,6 @@ GPU::~GPU()
   JoinScreenshotThreads();
   DestroyDeinterlaceTextures();
   g_gpu_device->RecycleTexture(std::move(m_chroma_smoothing_texture));
-
-  if (g_gpu_device)
-    g_gpu_device->SetGPUTimingEnabled(false);
 }
 
 bool GPU::Initialize()
@@ -101,8 +98,6 @@ bool GPU::Initialize()
     return false;
   }
 
-  g_gpu_device->SetGPUTimingEnabled(g_settings.display_show_gpu_usage);
-
 #ifdef PSX_GPU_STATS
   s_active_gpu_cycles = 0;
   s_active_gpu_cycles_frames = 0;
@@ -147,8 +142,6 @@ void GPU::UpdateSettings(const Settings& old_settings)
       Panic("Failed to compile display pipeline on settings change.");
     }
   }
-
-  g_gpu_device->SetGPUTimingEnabled(g_settings.display_show_gpu_usage);
 }
 
 void GPU::CPUClockChanged()

@@ -1911,6 +1911,9 @@ bool System::Initialize(bool force_software_renderer, Error* error)
   s_cpu_thread_handle = Threading::ThreadHandle::GetForCallingThread();
 
+  if (g_settings.display_show_gpu_usage)
+    g_gpu_device->SetGPUTimingEnabled(true);
+
   UpdateThrottlePeriod();
   UpdateMemorySaveStateSettings();
 
   return true;
@@ -1971,6 +1974,7 @@ void System::DestroySystem()
   // Restore present-all-frames behavior.
   if (s_keep_gpu_device_on_shutdown && g_gpu_device)
   {
+    g_gpu_device->SetGPUTimingEnabled(false);
     UpdateDisplayVSync();
   }
   else
@@ -4447,6 +4451,9 @@ void System::CheckForSettingsChanges(const Settings& old_settings)
     UpdateSpeedLimiterState();
   }
 
+  if (g_settings.display_show_gpu_usage != old_settings.display_show_gpu_usage)
+    g_gpu_device->SetGPUTimingEnabled(g_settings.display_show_gpu_usage);
+
   if (g_settings.inhibit_screensaver != old_settings.inhibit_screensaver)
   {
     if (g_settings.inhibit_screensaver)
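
Taken together, the change moves the host GPU timing toggle out of the guest GPU class and into System's init/teardown and settings-change paths, where the rest of the host-device state is already managed. For context, here is a minimal sketch of how an overlay might consume the timing once it is enabled. It reuses g_gpu_device and g_settings from the diff above, but GetAndResetAccumulatedGPUTime(), the ImGui output, and the DrawGpuUsageOverlay() helper are assumptions, not part of this commit.

// Sketch only, not part of this commit. g_gpu_device and g_settings are the
// globals shown in the diff; GetAndResetAccumulatedGPUTime() is an assumed
// accessor on the device, and this helper itself is hypothetical.
static void DrawGpuUsageOverlay(float frame_time_ms)
{
  // Nothing to report if the user has not enabled the GPU usage display.
  if (!g_settings.display_show_gpu_usage)
    return;

  // Accumulated host GPU time since the last query, in milliseconds.
  const float gpu_time_ms = g_gpu_device->GetAndResetAccumulatedGPUTime();

  // Express it as a percentage of the frame period to get a usage figure.
  const float usage = (frame_time_ms > 0.0f) ? ((gpu_time_ms / frame_time_ms) * 100.0f) : 0.0f;
  ImGui::Text("GPU: %.1f%%", usage);
}

Gating the toggle on display_show_gpu_usage, and flipping it in CheckForSettingsChanges only when the value actually changes, keeps the timing-query overhead out of the common path when the overlay is disabled.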