mirror of https://github.com/PCSX2/pcsx2.git
gsdx ogl: use immutable textures, which avoids repeated validation by the driver
git-svn-id: http://pcsx2.googlecode.com/svn/trunk@5629 96395faa-99c1-11dd-bbfe-3dabce05a288
This commit is contained in:
parent b9bb764749
commit 15b255617a
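The change the title refers to is allocating texture storage with glTexStorage2D (GL 4.2 immutable storage) instead of glTexImage2D, so the driver can validate the allocation once instead of re-checking mutable levels every time the texture is used. A minimal sketch of the difference, with an illustrative helper name and format rather than the GSdx code:

    #include <GL/glew.h>

    // Sketch only: contrasts mutable allocation (glTexImage2D) with immutable
    // storage (glTexStorage2D, GL 4.2 / ARB_texture_storage). The size, format
    // and helper name are illustrative.
    GLuint AllocateTexture(int w, int h, bool immutable)
    {
        GLuint tex = 0;
        glGenTextures(1, &tex);
        glBindTexture(GL_TEXTURE_2D, tex);

        if (immutable) {
            // One call fixes size, format and mip count for the lifetime of the
            // texture, so the driver validates the storage once.
            glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA8, w, h);
        } else {
            // Mutable storage: every level could later be redefined, so the
            // driver must re-check completeness whenever the texture is used.
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        }
        return tex;
    }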
@@ -1359,13 +1359,12 @@ void GSDeviceOGL::CompileShaderFromSource(const std::string& glsl_file, const st
     GLint log_length = 0;
     glGetProgramiv(*program, GL_INFO_LOG_LENGTH, &log_length);
     if (log_length > 0) {
-        char* log = (char*)malloc(log_length);
-        glGetProgramInfoLog(*program, log_length, NULL, log);
+        char* log = new char[log_length];
+        glGetProgramInfoLog(*program, log_length, NULL, log);
         fprintf(stderr, "%s", log);
-        free(log);
+        delete[] log;
     }
     fprintf(stderr, "\n");
-
     }
 }

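The hunk above only swaps malloc/free for new[]/delete[] around the program info log. As a self-contained sketch of that pattern (the helper name is illustrative, not the GSdx function):

    #include <GL/glew.h>
    #include <cstdio>

    // Illustrative helper: prints a program's link/validation log using
    // new[]/delete[] as the patch does.
    static void PrintProgramLog(GLuint program)
    {
        GLint log_length = 0;
        glGetProgramiv(program, GL_INFO_LOG_LENGTH, &log_length);
        if (log_length > 0) {
            char* log = new char[log_length];
            glGetProgramInfoLog(program, log_length, NULL, log);
            fprintf(stderr, "%s", log);
            delete[] log;
        }
        fprintf(stderr, "\n");
    }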
@@ -63,7 +63,7 @@ GSTextureOGL::GSTextureOGL(int type, int w, int h, bool msaa, int format, GLuint
         //FIXME I not sure we need a pixel buffer object. It seems more a texture
         // glGenBuffers(1, &m_texture_id);
         // m_texture_target = GL_PIXEL_UNPACK_BUFFER;
-        // assert(0);
+        // ASSERT(0);
         // Note there is also a buffer texture!!!
         // http://www.opengl.org/wiki/Buffer_Texture
         // Note: in this case it must use in GLSL
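The commented-out notes above point at buffer textures (http://www.opengl.org/wiki/Buffer_Texture) as an alternative to a pixel buffer object. A rough sketch of what backing a texture with a buffer looks like; the size and format are illustrative, and the contents are then read in GLSL through a samplerBuffer with texelFetch():

    #include <GL/glew.h>

    // Rough sketch of the buffer-texture alternative mentioned above; the 4 KiB
    // size and GL_RGBA8 format are illustrative.
    GLuint MakeBufferTexture()
    {
        GLuint buf = 0, tex = 0;

        glGenBuffers(1, &buf);
        glBindBuffer(GL_TEXTURE_BUFFER, buf);
        glBufferData(GL_TEXTURE_BUFFER, 4096, NULL, GL_STATIC_DRAW);

        glGenTextures(1, &tex);
        glBindTexture(GL_TEXTURE_BUFFER, tex);
        glTexBuffer(GL_TEXTURE_BUFFER, GL_RGBA8, buf); // texture now reads from buf

        return tex;
    }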
@@ -78,6 +78,7 @@ GSTextureOGL::GSTextureOGL(int type, int w, int h, bool msaa, int format, GLuint
         case GSTexture::Backbuffer:
             m_texture_target = 0;
             m_texture_id = 0;
+            break;
         default:
             break;
     }
@@ -94,45 +95,34 @@ GSTextureOGL::GSTextureOGL(int type, int w, int h, bool msaa, int format, GLuint

     // Allocate the buffer
     switch (m_type) {
-        case GSTexture::DepthStencil:
-            EnableUnit(0);
-            glTexImage2D(m_texture_target, 0, m_format, m_size.x, m_size.y, 0, GL_DEPTH_STENCIL, GL_FLOAT_32_UNSIGNED_INT_24_8_REV, NULL);
-            break;
-
         case GSTexture::Offscreen:
             // Allocate a pbo with the texture
             if (m_format == GL_RGBA8) m_pbo_size = m_size.x * m_size.y * 4;
             else if (m_format == GL_R16UI) m_pbo_size = m_size.x * m_size.y * 2;
             else {
                 fprintf(stderr, "wrong texture pixel format :%x\n", m_format);
-                assert(0); // TODO Later
+                ASSERT(0); // TODO Later
             }

             glBindBuffer(GL_PIXEL_PACK_BUFFER, m_pbo_id);
             glBufferData(GL_PIXEL_PACK_BUFFER, m_pbo_size, NULL, GL_STREAM_DRAW);
             glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
+            ASSERT(!m_msaa);

+        case GSTexture::DepthStencil:
         case GSTexture::RenderTarget:
         case GSTexture::Texture:
-            // FIXME
+            // FIXME: check the opensource driver
             // Howto allocate the texture unit !!!
             // In worst case the HW renderer seems to use 3 texture unit
             // For the moment SW renderer only use 1 so don't bother
             EnableUnit(0);
-            // Did we need to setup a default sampler of the texture now?
-            // glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
-            // glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
-            // glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
-            // glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
-            if (m_format == GL_RGBA8)
-                glTexImage2D(m_texture_target, 0, m_format, m_size.x, m_size.y, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
-            else if (m_format == GL_R16UI)
-                glTexImage2D(m_texture_target, 0, m_format, m_size.x, m_size.y, 0, GL_RED_INTEGER, GL_UNSIGNED_SHORT, NULL);
-            else if (m_format == GL_R8)
-                glTexImage2D(m_texture_target, 0, m_format, m_size.x, m_size.y, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
-            else {
-                fprintf(stderr, "wrong texture pixel format :%x\n", m_format);
-                assert(0); // TODO Later
+            if (m_msaa) {
+                ASSERT(m_texture_target == GL_TEXTURE_2D_MULTISAMPLE);
+                // Require a recent GLEW and GL4.3
+                //glTexStorage2DMultisample(m_texture_target, msaa_level, m_format, m_size.x, m_size.y, false);
+            } else {
+                glTexStorage2D(m_texture_target, 1, m_format, m_size.x, m_size.y);
             }
             break;
         default: break;
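This is the core of the commit: the per-format glTexImage2D calls (and the separate DepthStencil case) are replaced by a single glTexStorage2D allocation, with DepthStencil/RenderTarget/Texture sharing one path and Offscreen keeping its pack PBO. A condensed, self-contained sketch of that scheme under assumed names; TexDesc, AllocateStorage and the sample count are illustrative, and the MSAA call is the glTexStorage2DMultisample the patch only references in a comment:

    #include <GL/glew.h>
    #include <cassert>

    // Condensed sketch of the allocation scheme this hunk arrives at (not the
    // actual GSTextureOGL constructor): offscreen textures get a readback PBO,
    // everything else gets immutable storage; MSAA would need GL 4.3's
    // glTexStorage2DMultisample, which the patch leaves commented out.
    struct TexDesc { GLenum target; GLenum format; int w, h; bool msaa; };

    void AllocateStorage(const TexDesc& d, GLuint tex, GLuint pbo)
    {
        glBindTexture(d.target, tex);

        if (pbo) {
            // Readback staging buffer sized for RGBA8 or R16UI texels.
            GLsizeiptr pbo_size = (GLsizeiptr)d.w * d.h * (d.format == GL_R16UI ? 2 : 4);
            glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo);
            glBufferData(GL_PIXEL_PACK_BUFFER, pbo_size, NULL, GL_STREAM_DRAW);
            glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
        }

        if (d.msaa) {
            assert(d.target == GL_TEXTURE_2D_MULTISAMPLE);
            glTexStorage2DMultisample(d.target, 4, d.format, d.w, d.h, GL_FALSE);
        } else {
            // Single immutable allocation; format and size can no longer change.
            glTexStorage2D(d.target, 1, d.format, d.w, d.h);
        }
    }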
@@ -155,7 +145,7 @@ void GSTextureOGL::Attach(GLenum attachment)

 bool GSTextureOGL::Update(const GSVector4i& r, const void* data, int pitch)
 {
-    if (m_type == GSTexture::DepthStencil || m_type == GSTexture::Offscreen) assert(0);
+    if (m_type == GSTexture::DepthStencil || m_type == GSTexture::Offscreen) ASSERT(0);

     // FIXME warning order of the y axis
     // FIXME I'm not confident with GL_UNSIGNED_BYTE type
@@ -176,7 +166,7 @@ bool GSTextureOGL::Update(const GSVector4i& r, const void* data, int pitch)
         glTexSubImage2D(m_texture_target, 0, r.x, r.y, r.width(), r.height(), GL_RED, GL_R8, data);
     else {
         fprintf(stderr, "wrong texture pixel format :%x\n", m_format);
-        assert(0);
+        ASSERT(0);
     }
 #if 0
     //if (m_size.x != 16)
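Update() keeps calling glTexSubImage2D: immutable storage fixes the allocation, not the contents, so sub-image uploads still work. (Note that the R8 path above passes GL_R8 where a pixel type such as GL_UNSIGNED_BYTE is normally expected.) A minimal upload sketch with illustrative names, including the GL_UNPACK_ROW_LENGTH handling a pitched source needs:

    #include <GL/glew.h>

    // Illustrative upload into an already-allocated (possibly immutable) texture:
    // glTexStorage2D fixes the storage, glTexSubImage2D still rewrites the texels.
    void UploadRect(GLenum target, int x, int y, int w, int h,
                    GLenum format, const void* data, int pitch_in_pixels)
    {
        // Tell GL how many pixels each source row actually holds.
        glPixelStorei(GL_UNPACK_ROW_LENGTH, pitch_in_pixels);
        glTexSubImage2D(target, 0, x, y, w, h, format, GL_UNSIGNED_BYTE, data);
        glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
    }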
@@ -256,7 +246,7 @@ bool GSTextureOGL::Map(GSMap& m, const GSVector4i* r)
             glReadPixels(0, 0, m_size.x, m_size.y, GL_RED, GL_UNSIGNED_BYTE, 0);
         } else {
             fprintf(stderr, "wrong texture pixel format :%x\n", m_format);
-            assert(0);
+            ASSERT(0);
         }
         glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);

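Map() reads the framebuffer into the pack PBO allocated in the constructor, so glReadPixels writes into the buffer (the final argument becomes a byte offset) instead of client memory. A self-contained sketch of that readback pattern with illustrative names and an RGBA format:

    #include <GL/glew.h>

    // Illustrative readback through a pixel pack buffer: with a PBO bound to
    // GL_PIXEL_PACK_BUFFER, glReadPixels writes into the buffer at offset 0.
    // The caller must glUnmapBuffer(GL_PIXEL_PACK_BUFFER) and unbind when done.
    void* ReadbackToPBO(GLuint read_fbo, GLuint pbo, int w, int h)
    {
        glBindFramebuffer(GL_READ_FRAMEBUFFER, read_fbo);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo);

        glReadPixels(0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, 0);

        glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);

        // Map the still-bound PBO to access the pixels on the CPU.
        return glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
    }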