From 0e5852f6345ab5ce9af81d5c87c6ec6b006b4903 Mon Sep 17 00:00:00 2001
From: Anthony Serna
Date: Sun, 29 May 2016 23:48:21 -0500
Subject: [PATCH] [OGL] Work around NVIDIA being weird with GL_MAX_TEXTURE_SIZE

---
 Source/Core/VideoBackends/OGL/Render.cpp | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/Source/Core/VideoBackends/OGL/Render.cpp b/Source/Core/VideoBackends/OGL/Render.cpp
index fc28a3118f..d5f2bac843 100644
--- a/Source/Core/VideoBackends/OGL/Render.cpp
+++ b/Source/Core/VideoBackends/OGL/Render.cpp
@@ -52,7 +52,7 @@ void VideoConfig::UpdateProjectionHack()
 }
 
 static int OSDInternalW, OSDInternalH;
-static int s_max_texture_size;
+static int s_max_texture_size = 0;
 
 namespace OGL
 {
@@ -711,9 +711,6 @@ void Renderer::Init()
   s_raster_font = std::make_unique<RasterFont>();
 
   OpenGL_CreateAttributelessVAO();
-
-  // Cache this, because if you do this multiple times a frame, it shows up really high on a profile.
-  glGetIntegerv(GL_MAX_TEXTURE_SIZE, &s_max_texture_size);
 }
 
 void Renderer::RenderText(const std::string& text, int left, int top, u32 color)
@@ -1702,6 +1699,10 @@ bool Renderer::SaveScreenshot(const std::string &filename, const TargetRectangle
 
 int Renderer::GetMaxTextureSize()
 {
+  // Right now, NVIDIA seems to do something very weird if we try to cache GL_MAX_TEXTURE_SIZE
+  // at init. This is a workaround that lets us keep the perf improvement that caching gives us.
+  if (s_max_texture_size == 0)
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &s_max_texture_size);
   return s_max_texture_size;
 }
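
For context on the fix itself: the patch moves the GL_MAX_TEXTURE_SIZE query out of Renderer::Init() and into the first call of Renderer::GetMaxTextureSize(). The value is still cached after a single query, preserving the profiling win described in the removed comment, but it is no longer read at init time, where the NVIDIA driver apparently misreports it. Below is a minimal standalone sketch of that lazy-cache pattern; the names GetMaxTextureSize and s_max_texture_size come from the patch, while the plain-OpenGL scaffolding around them is illustrative rather than Dolphin code.

#include <GL/gl.h>

// 0 means "not queried yet"; any conforming GL implementation reports a
// GL_MAX_TEXTURE_SIZE of at least 64, so 0 is a safe sentinel value.
static GLint s_max_texture_size = 0;

GLint GetMaxTextureSize()
{
  // Query once, on first use with a GL context current, instead of at init.
  // The cached result avoids repeating glGetIntegerv, which shows up high
  // in profiles when it is called many times per frame.
  if (s_max_texture_size == 0)
    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &s_max_texture_size);
  return s_max_texture_size;
}

One caveat of the sentinel approach: if the query itself fails (for example, no context is current), s_max_texture_size stays 0 and the query is simply retried on the next call, which degrades gracefully rather than caching a bogus value.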