GPU: Move resolution scale to hardware backend only

Connor McLaughlin 2019-11-02 01:32:27 +10:00
parent c9feb7ea07
commit c52c0608ae
6 changed files with 21 additions and 15 deletions
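In outline, this moves the resolution-scale state out of the GPU base class and into GPU_HW, so only hardware renderers carry it. A minimal sketch of the resulting layout (unrelated members elided; the alias is shown only for self-containedness):

#include <cstdint>
using u32 = std::uint32_t; // mirrors the codebase typedef

class GPU
{
  // shared state only; m_resolution_scale and m_max_resolution_scale
  // no longer live at this level
};

class GPU_HW : public GPU
{
protected:
  u32 m_resolution_scale = 1;     // moved here from GPU
  u32 m_max_resolution_scale = 1; // moved here from GPU
};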


@@ -28,7 +28,7 @@ bool GPU::Initialize(System* system, DMA* dma, InterruptController* interrupt_co
void GPU::UpdateSettings()
{
  m_resolution_scale = std::clamp<u32>(m_system->GetSettings().gpu_resolution_scale, 1, m_max_resolution_scale);
}
void GPU::Reset()


@@ -52,9 +52,7 @@ public:
  void DMARead(u32* words, u32 word_count);
  void DMAWrite(const u32* words, u32 word_count);
  // Resolution scaling.
  u32 GetResolutionScale() const { return m_resolution_scale; }
  u32 GetMaxResolutionScale() const { return m_max_resolution_scale; }
  // Recompile shaders/recreate framebuffers when needed.
  virtual void UpdateSettings();
  // Ticks for hblank/vblank.
@@ -299,10 +297,6 @@ protected:
  InterruptController* m_interrupt_controller = nullptr;
  Timers* m_timers = nullptr;
  // Resolution scale.
  u32 m_resolution_scale = 1;
  u32 m_max_resolution_scale = 1;
  union GPUSTAT
  {
    u32 bits;


@@ -17,10 +17,24 @@ void GPU_HW::Reset()
  m_batch = {};
}
bool GPU_HW::Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers)
{
  if (!GPU::Initialize(system, dma, interrupt_controller, timers))
    return false;
  m_resolution_scale = std::clamp<u32>(m_system->GetSettings().gpu_resolution_scale, 1, m_max_resolution_scale);
  m_system->GetSettings().gpu_resolution_scale = m_resolution_scale;
  m_system->GetSettings().max_gpu_resolution_scale = m_max_resolution_scale;
  m_true_color = m_system->GetSettings().gpu_true_color;
  return true;
}
void GPU_HW::UpdateSettings()
{
  GPU::UpdateSettings();
  m_resolution_scale = std::clamp<u32>(m_system->GetSettings().gpu_resolution_scale, 1, m_max_resolution_scale);
  m_system->GetSettings().gpu_resolution_scale = m_resolution_scale;
  m_true_color = m_system->GetSettings().gpu_true_color;
}
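Both GPU_HW::Initialize and GPU_HW::UpdateSettings apply the same idiom here: clamp the requested scale into [1, max] and write the effective value back into the settings, so the frontend always sees the scale actually in use. A self-contained sketch of that idiom; the Settings field names mirror the diff, while the standalone function is illustrative:

#include <algorithm>
#include <cstdint>

using u32 = std::uint32_t;

struct Settings
{
  u32 gpu_resolution_scale = 8; // requested by the user
  u32 max_gpu_resolution_scale = 0;
};

// Clamp the requested scale to what the backend supports (max_scale is
// assumed >= 1), then write the effective values back so the UI can
// reflect the scale that was actually applied.
u32 ApplyResolutionScale(Settings& settings, u32 max_scale)
{
  const u32 scale = std::clamp<u32>(settings.gpu_resolution_scale, 1, max_scale);
  settings.gpu_resolution_scale = scale;
  settings.max_gpu_resolution_scale = max_scale;
  return scale;
}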


@@ -11,6 +11,7 @@ public:
  GPU_HW();
  virtual ~GPU_HW();
  virtual bool Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers) override;
  virtual void Reset() override;
  virtual void UpdateSettings() override;
@@ -108,6 +109,8 @@ protected:
  std::string GenerateDisplayFragmentShader(bool depth_24bit, bool interlaced);
  HWRenderBatch m_batch = {};
  u32 m_resolution_scale = 1;
  u32 m_max_resolution_scale = 1;
  bool m_true_color = false;
private:


@@ -16,10 +16,11 @@ GPU_HW_OpenGL::~GPU_HW_OpenGL()
bool GPU_HW_OpenGL::Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers)
{
  SetMaxResolutionScale();
  if (!GPU_HW::Initialize(system, dma, interrupt_controller, timers))
    return false;
  SetMaxResolutionScale();
  CreateFramebuffer();
  CreateVertexBuffer();
  if (!CompilePrograms())
@@ -149,8 +150,6 @@ void GPU_HW_OpenGL::SetMaxResolutionScale()
  m_max_resolution_scale = std::min(max_texture_scale, line_width_range[1]);
  Log_InfoPrintf("Maximum resolution scale is %u", m_max_resolution_scale);
  m_resolution_scale = std::min(m_resolution_scale, m_max_resolution_scale);
}
void GPU_HW_OpenGL::CreateFramebuffer()
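Note the reordering in the hunk above: SetMaxResolutionScale() now runs before GPU_HW::Initialize, so m_max_resolution_scale is already populated when the base class clamps the requested scale; the later call is the line being removed. A hypothetical sketch of how a GL backend can derive that maximum; the variable names mirror the diff, but the VRAM arithmetic is an assumption and only the GL entry points and enums are real API:

GLint max_texture_size = 0;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);

GLfloat line_width_range[2] = {1.0f, 1.0f};
glGetFloatv(GL_ALIASED_LINE_WIDTH_RANGE, line_width_range);

// PSX VRAM is 1024x512, so the scale is bounded by the largest texture the
// driver allows and by the widest line it can rasterize.
const u32 max_texture_scale = static_cast<u32>(max_texture_size) / 1024;
const u32 max_scale = std::min(max_texture_scale, static_cast<u32>(line_width_range[1]));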


@@ -130,10 +130,6 @@ bool System::CreateGPU()
  m_bus->SetGPU(m_gpu.get());
  m_dma->SetGPU(m_gpu.get());
  // the new GPU could have a lower maximum resolution
  m_settings.gpu_resolution_scale = m_gpu->GetResolutionScale();
  m_settings.max_gpu_resolution_scale = m_gpu->GetMaxResolutionScale();
  return true;
}
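With GetResolutionScale() and GetMaxResolutionScale() removed from the base class, System::CreateGPU no longer copies the effective scale back into m_settings; the hardware backend reconciles the settings itself during Initialize. A condensed, hypothetical sketch of the resulting flow (the factory line is illustrative, not from the repository):

// The backend clamps the requested scale and writes the effective values
// back into the shared settings during its own Initialize.
std::unique_ptr<GPU> gpu = std::make_unique<GPU_HW_OpenGL>();
if (!gpu->Initialize(system, dma, interrupt_controller, timers))
  return false;
// settings.gpu_resolution_scale now holds the clamped, effective value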