OpenGL: disable shader cache when ShaderDebugging is enabled

Fixes issue 6859.
This commit is contained in:
degasus 2013-12-09 16:45:20 +01:00
parent 42619c1d2d
commit 134c89ef98
1 changed files with 2 additions and 2 deletions

View File

@@ -489,7 +489,7 @@ void ProgramShaderCache::Init(void)
 	}
 	// Read our shader cache, only if supported
-	if (g_ogl_config.bSupportsGLSLCache)
+	if (g_ogl_config.bSupportsGLSLCache && !g_Config.bEnableShaderDebugging)
 	{
 		GLint Supported;
 		glGetIntegerv(GL_NUM_PROGRAM_BINARY_FORMATS, &Supported);
@@ -522,7 +522,7 @@ void ProgramShaderCache::Init(void)
 void ProgramShaderCache::Shutdown(void)
 {
 	// store all shaders in cache on disk
-	if (g_ogl_config.bSupportsGLSLCache)
+	if (g_ogl_config.bSupportsGLSLCache && !g_Config.bEnableShaderDebugging)
 	{
 		PCache::iterator iter = pshaders.begin();
 		for (; iter != pshaders.end(); ++iter)