Get the program binary type correctly or else ATI makes massive (~400MB) shader caches. Also, don't need the line in the PixelShaderGen.

This commit is contained in:
Ryan Houdek 2011-12-29 01:35:50 -06:00 committed by Sonicadvance1
parent 2e15440896
commit 03b09bed5d
3 changed files with 19 additions and 19 deletions

View File

@@ -577,8 +577,6 @@ const char *GeneratePixelShaderCode(DSTALPHA_MODE dstAlphaMode, API_TYPE ApiType
if (g_ActiveConfig.backend_info.bSupportsGLSLATTRBind)
WRITE(p, "#extension GL_ARB_explicit_attrib_location : enable\n");
if (g_ActiveConfig.backend_info.bSupportsGLSLBlend)
WRITE(p, "#extension GL_ARB_blend_func_extended : enable\n");
// Silly differences
WRITE(p, "#define float2 vec2\n");
WRITE(p, "#define float3 vec3\n");

View File

@@ -25,6 +25,7 @@ GLuint ProgramShaderCache::CurrentFShader = 0, ProgramShaderCache::CurrentVShade
ProgramShaderCache::PCache ProgramShaderCache::pshaders;
GLuint ProgramShaderCache::s_ps_vs_ubo;
GLintptr ProgramShaderCache::s_vs_data_offset;
GLenum ProgramShaderCache::prog_format;
LinearDiskCache<ProgramShaderCache::ShaderUID, u8> g_program_disk_cache;
GLenum ProgramFormat;
@@ -213,6 +214,16 @@ void ProgramShaderCache::Init(void)
// Read our shader cache, only if supported
if (g_ActiveConfig.backend_info.bSupportsGLSLCache)
{
GLint Supported;
glGetIntegerv(GL_NUM_PROGRAM_BINARY_FORMATS, &Supported);
GLint *Formats = new GLint[Supported];
glGetIntegerv(GL_PROGRAM_BINARY_FORMATS, Formats);
// We don't really care about format
// We just need the correct data type
prog_format = (GLenum)Formats[0];
delete[] Formats;
char cache_filename[MAX_PATH];
sprintf(cache_filename, "%sogl-%s-shaders.cache", File::GetUserPath(D_SHADERCACHE_IDX).c_str(),
SConfig::GetInstance().m_LocalCoreStartupParameter.m_strUniqueID.c_str());

View File

@@ -41,7 +41,6 @@ public:
struct PCacheEntry
{
GLuint prog_id;
GLenum prog_format;
u8 *binary;
GLint binary_size;
GLuint vsid, psid;
@@ -75,25 +74,15 @@ public:
glGetProgramiv(prog_id, GL_PROGRAM_BINARY_LENGTH, &binary_size);
}
// No idea how necessary this is
// Queries the driver for its list of supported program-binary formats and
// stores the first one in prog_format. The specific value is not otherwise
// interpreted here; it is only passed back to GL later (glProgramBinary).
void SetProgramFormat()
{
	GLint Supported = 0;
	glGetIntegerv(GL_NUM_PROGRAM_BINARY_FORMATS, &Supported);
	// A driver may legitimately report zero supported formats; indexing
	// Formats[0] in that case would be an out-of-bounds read, so bail out
	// and leave prog_format untouched.
	if (Supported <= 0)
		return;
	GLint *Formats = new GLint[Supported];
	glGetIntegerv(GL_PROGRAM_BINARY_FORMATS, Formats);
	// We don't really care about format
	// We just need the correct data type
	prog_format = (GLenum)Formats[0];
	delete[] Formats;
}
// Retrieves the linked program's binary blob from the driver into a freshly
// allocated buffer and returns it (caller reads it via the `binary` member;
// ownership stays with this entry — FreeProgram() releases it).
// NOTE(review): this text is a merged diff with the +/- markers stripped.
// The first glGetProgramBinary call (writing into prog_format) appears to be
// the pre-change line and the second (writing into _form, then comparing
// against the cached prog_format) its replacement — only one of the two
// exists in the real source; confirm against the upstream commit.
u8 *GetProgram()
{
// Refresh binary_size from GL_PROGRAM_BINARY_LENGTH before allocating.
UpdateSize();
// Drop any previously fetched buffer so we don't leak it.
FreeProgram();
binary = new u8[binary_size];
glGetProgramBinary(prog_id, binary_size, NULL, &prog_format, binary);
GLenum _form;
glGetProgramBinary(prog_id, binary_size, NULL, &_form, binary);
// Sanity check: the driver should hand back the same format we recorded
// at init; a mismatch is logged but the binary is still returned.
if (_form != prog_format)
ERROR_LOG(VIDEO, "Returned format not the same as expected! %d vs %d", _form, prog_format);
return binary;
}
@@ -130,8 +119,7 @@ private:
// But it is fine, no need to worry about that
PCacheEntry entry;
entry.Create(key.first, key.second);
glProgramBinary(entry.prog_id, entry.prog_format, value, value_size);
glProgramBinary(entry.prog_id, prog_format, value, value_size);
GLint success;
glGetProgramiv(entry.prog_id, GL_LINK_STATUS, &success);
@@ -153,6 +141,9 @@ private:
static GLuint s_ps_vs_ubo;
static GLintptr s_vs_data_offset;
static GLenum prog_format;
static void SetProgramVariables(PCacheEntry &entry);
};