gsdx dx/gl: fix depth optimization

The 4th bit of m_vt.m_eq.xyzf is the fog value (0x8), not the depth (0x4).
Gregory Hainaut 2016-08-27 13:57:39 +02:00
parent 7b222768d4
commit b6693c4345
2 changed files with 2 additions and 2 deletions

GSRendererDX.cpp

@@ -159,7 +159,7 @@ void GSRendererDX::EmulateZbuffer()
 	GSVertex* v = &m_vertex.buff[0];
 	// Minor optimization of a corner case (it allow to better emulate some alpha test effects)
-	if (om_dssel.ztst == ZTST_GEQUAL && (m_vt.m_eq.xyzf & 0x8) && v[0].XYZ.Z == max_z) {
+	if (om_dssel.ztst == ZTST_GEQUAL && (m_vt.m_eq.xyzf & 0x4) && v[0].XYZ.Z == max_z) {
 #ifdef _DEBUG
 		fprintf(stdout, "Optimize Z test GEQUAL to ALWAYS (%s)\n", psm_str(m_context->ZBUF.PSM));
 #endif

GSRendererOGL.cpp

@@ -253,7 +253,7 @@ void GSRendererOGL::EmulateZbuffer()
 	GSVertex* v = &m_vertex.buff[0];
 	// Minor optimization of a corner case (it allow to better emulate some alpha test effects)
-	if (m_om_dssel.ztst == ZTST_GEQUAL && (m_vt.m_eq.xyzf & 0x8) && v[0].XYZ.Z == max_z) {
+	if (m_om_dssel.ztst == ZTST_GEQUAL && (m_vt.m_eq.xyzf & 0x4) && v[0].XYZ.Z == max_z) {
 		GL_INS("Optimize Z test GEQUAL to ALWAYS (%s)", psm_str(m_context->ZBUF.PSM));
 		m_om_dssel.ztst = ZTST_ALWAYS;
 	}
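
For reference, a minimal sketch of the bit layout implied by the masks in both hunks, assuming m_vt.m_eq.xyzf packs one "all vertices share this value" flag per component; the constant and function names below are illustrative and not taken from the GSdx source:

#include <cstdint>

// Assumed layout of the xyzf equality mask, inferred from the commit message
// and the masks used in the diff above.
constexpr uint32_t EQ_X = 0x1; // all vertices share the same X
constexpr uint32_t EQ_Y = 0x2; // all vertices share the same Y
constexpr uint32_t EQ_Z = 0x4; // all vertices share the same Z (depth)
constexpr uint32_t EQ_F = 0x8; // all vertices share the same fog value

// The GEQUAL-to-ALWAYS shortcut is only safe when the depth is constant
// across the draw, so the guard must test EQ_Z (0x4); the old code tested
// the fog bit EQ_F (0x8) instead.
inline bool depth_is_constant(uint32_t eq_xyzf)
{
	return (eq_xyzf & EQ_Z) != 0;
}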