GPUBackend: Don't rely on g_settings

Author: Stenzek
Date:   2024-09-27 20:57:53 +10:00
Parent: 4e880280bd
Commit: 615781be39
6 changed files with 16 additions and 17 deletions

src/core/gpu_backend.cpp

@@ -2,7 +2,6 @@
// SPDX-License-Identifier: CC-BY-NC-ND-4.0
#include "gpu_backend.h"
-#include "settings.h"
#include "util/state_wrapper.h"
@@ -18,9 +17,9 @@ GPUBackend::GPUBackend() = default;
GPUBackend::~GPUBackend() = default;
-bool GPUBackend::Initialize(bool force_thread)
+bool GPUBackend::Initialize(bool use_thread)
{
-  if (force_thread || g_settings.gpu_use_thread)
+  if (use_thread)
    StartGPUThread();
  return true;
@@ -32,13 +31,13 @@ void GPUBackend::Reset()
  DrawingAreaChanged(GPUDrawingArea{0, 0, 0, 0}, GSVector4i::zero());
}
-void GPUBackend::UpdateSettings()
+void GPUBackend::SetThreadEnabled(bool use_thread)
{
  Sync(true);
-  if (m_use_gpu_thread != g_settings.gpu_use_thread)
+  if (m_use_gpu_thread != use_thread)
  {
-    if (!g_settings.gpu_use_thread)
+    if (!use_thread)
      StopGPUThread();
    else
      StartGPUThread();

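Not part of the diff: the hunk above is cut off right after StartGPUThread(), so the tail of the new GPUBackend::SetThreadEnabled() is not visible here. A minimal sketch of how the full function plausibly reads, assuming the start/stop helpers take care of m_use_gpu_thread themselves:

// Sketch only; everything after StartGPUThread() is an assumption, not commit content.
void GPUBackend::SetThreadEnabled(bool use_thread)
{
  Sync(true); // drain queued commands before switching threading modes

  if (m_use_gpu_thread != use_thread)
  {
    if (!use_thread)
      StopGPUThread();  // assumed to join the worker and clear m_use_gpu_thread
    else
      StartGPUThread(); // assumed to spawn the worker and set m_use_gpu_thread
  }
}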
src/core/gpu_backend.h

@@ -26,11 +26,12 @@ public:
  ALWAYS_INLINE const Threading::Thread* GetThread() const { return m_use_gpu_thread ? &m_gpu_thread : nullptr; }
-  virtual bool Initialize(bool force_thread);
-  virtual void UpdateSettings();
+  virtual bool Initialize(bool use_thread);
  virtual void Reset();
  virtual void Shutdown();
+  void SetThreadEnabled(bool use_thread);
  GPUBackendFillVRAMCommand* NewFillVRAMCommand();
  GPUBackendUpdateVRAMCommand* NewUpdateVRAMCommand(u32 num_words);
  GPUBackendCopyVRAMCommand* NewCopyVRAMCommand();

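Not part of the diff: with UpdateSettings() gone from this interface, the settings read moves to the caller. A minimal usage sketch built only from the declarations above and the gpu_sw.cpp hunks below, where m_backend stands for any GPUBackend-derived member (as in GPU_SW):

// Usage sketch, not commit content. The caller now owns the g_settings lookup.

// At renderer initialization: decide up front whether the backend runs its own thread.
if (!m_backend.Initialize(g_settings.gpu_use_thread))
  return false;

// When settings change: only toggle the thread if the relevant value actually changed.
if (g_settings.gpu_use_thread != old_settings.gpu_use_thread)
  m_backend.SetThreadEnabled(g_settings.gpu_use_thread);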
src/core/gpu_sw.cpp

@@ -2,6 +2,7 @@
// SPDX-License-Identifier: CC-BY-NC-ND-4.0
#include "gpu_sw.h"
+#include "settings.h"
#include "system.h"
#include "util/gpu_device.h"
@@ -36,7 +37,7 @@ bool GPU_SW::IsHardwareRenderer() const
bool GPU_SW::Initialize()
{
-  if (!GPU::Initialize() || !m_backend.Initialize(false))
+  if (!GPU::Initialize() || !m_backend.Initialize(g_settings.gpu_use_thread))
    return false;
  static constexpr const std::array formats_for_16bit = {GPUTexture::Format::RGB565, GPUTexture::Format::RGBA5551,
@@ -82,7 +83,8 @@ void GPU_SW::Reset(bool clear_vram)
void GPU_SW::UpdateSettings(const Settings& old_settings)
{
  GPU::UpdateSettings(old_settings);
-  m_backend.UpdateSettings();
+  if (g_settings.gpu_use_thread != old_settings.gpu_use_thread)
+    m_backend.SetThreadEnabled(g_settings.gpu_use_thread);
}
GPUTexture* GPU_SW::GetDisplayTexture(u32 width, u32 height, GPUTexture::Format format)

src/core/gpu_sw_backend.cpp

@@ -14,9 +14,9 @@ GPU_SW_Backend::GPU_SW_Backend() = default;
GPU_SW_Backend::~GPU_SW_Backend() = default;
-bool GPU_SW_Backend::Initialize(bool force_thread)
+bool GPU_SW_Backend::Initialize(bool use_thread)
{
-  return GPUBackend::Initialize(force_thread);
+  return GPUBackend::Initialize(use_thread);
}
void GPU_SW_Backend::Reset()

src/core/gpu_sw_backend.h

@@ -14,7 +14,7 @@ public:
  GPU_SW_Backend();
  ~GPU_SW_Backend() override;
-  bool Initialize(bool force_thread) override;
+  bool Initialize(bool use_thread) override;
  void Reset() override;
protected:

src/core/gpu_sw_rasterizer.cpp

@@ -9,7 +9,6 @@
#include "common/intrin.h"
#include "common/types.h"
#include <algorithm>
#include <array>
namespace GPU_SW_Rasterizer {
@@ -76,7 +75,7 @@ ALWAYS_INLINE static DrawTriangleFunction GetDrawTriangleFunction(bool shading_e
}
// Have to define the symbols globally, because clang won't include them otherwise.
-#if defined(CPU_ARCH_SSE) && defined(_MSC_VER)
+#if defined(CPU_ARCH_SSE) && 0
#define ALTERNATIVE_RASTERIZER_LIST() DECLARE_ALTERNATIVE_RASTERIZER(AVX2)
#else
#define ALTERNATIVE_RASTERIZER_LIST()
@@ -87,5 +86,3 @@ ALTERNATIVE_RASTERIZER_LIST()
#undef DECLARE_ALTERNATIVE_RASTERIZER
} // namespace GPU_SW_Rasterizer
-// static u32 s_bad_counter = 0;
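Not part of the diff: for context on the last two hunks, ALTERNATIVE_RASTERIZER_LIST() exists to declare the symbols of the separately compiled AVX2 rasterizer in this translation unit (per the comment about clang). The actual DECLARE_ALTERNATIVE_RASTERIZER expansion is not shown in this commit, so the sketch below is only an assumption about its general shape; the table name and dimensions are hypothetical.

// Hypothetical sketch of what DECLARE_ALTERNATIVE_RASTERIZER(AVX2) might declare;
// the real expansion is not part of this diff.
namespace GPU_SW_Rasterizer::AVX2 {
// Assumed: a function table indexed by the same flags GetDrawTriangleFunction() receives.
extern const DrawTriangleFunction DrawTriangleFunctions[2][2][2][2]; // dimensions hypothetical
}

// After this change, "#if defined(CPU_ARCH_SSE) && 0" never holds, so
// ALTERNATIVE_RASTERIZER_LIST() expands to nothing and no AVX2 variant is declared here.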