Set the ProgramShaderCache program format correctly.

This commit is contained in:
Shawn Hoffman 2012-01-18 10:22:03 -08:00 committed by Sonicadvance1
parent 10666a9b87
commit 1a8005d948
2 changed files with 4 additions and 3 deletions

View File

@ -29,7 +29,7 @@ GLintptr ProgramShaderCache::s_vs_data_offset;
LinearDiskCache<ProgramShaderCache::ShaderUID, u8> g_program_disk_cache;
GLenum ProgramFormat;
GLuint ProgramShaderCache::PCacheEntry::prog_format = ProgramShaderCache::PCacheEntry::SetProgramFormat();
GLuint ProgramShaderCache::PCacheEntry::prog_format = 0;
std::pair<u32, u32> ProgramShaderCache::CurrentShaderProgram;
const char *UniformNames[NUM_UNIFORMS] =
@ -215,9 +215,12 @@ void ProgramShaderCache::Init(void)
// Read our shader cache, only if supported
if (g_ActiveConfig.backend_info.bSupportsGLSLCache)
{
PCacheEntry::prog_format = PCacheEntry::SetProgramFormat();
char cache_filename[MAX_PATH];
sprintf(cache_filename, "%sogl-%s-shaders.cache", File::GetUserPath(D_SHADERCACHE_IDX).c_str(),
SConfig::GetInstance().m_LocalCoreStartupParameter.m_strUniqueID.c_str());
ProgramShaderCacheInserter inserter;
g_program_disk_cache.OpenAndRead(cache_filename, inserter);
}

View File

@ -78,8 +78,6 @@ public:
// No idea how necessary this is
static GLenum SetProgramFormat()
{
if(!g_ActiveConfig.backend_info.bSupportsGLSLCache)
return 0;
GLint Supported;
glGetIntegerv(GL_NUM_PROGRAM_BINARY_FORMATS, &Supported);