From 6e0eba99177e8562e7f4e3ef8709b74a71447a46 Mon Sep 17 00:00:00 2001
From: Subv
Date: Mon, 2 Jul 2018 21:06:36 -0500
Subject: [PATCH] GPU: Use only the least significant 3 bits when reading the
 depth test func.

Some games, as well as nouveau, write the full GL define value here
(e.g. GL_LESS = 0x201), but others seem to set only the last 3 bits.
---
 src/video_core/engines/maxwell_3d.h | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/src/video_core/engines/maxwell_3d.h b/src/video_core/engines/maxwell_3d.h
index ff67f2a58b..12aec35494 100644
--- a/src/video_core/engines/maxwell_3d.h
+++ b/src/video_core/engines/maxwell_3d.h
@@ -281,14 +281,14 @@ public:
         };
 
         enum class ComparisonOp : u32 {
-            Never = 0x200,
-            Less = 0x201,
-            Equal = 0x202,
-            LessEqual = 0x203,
-            Greater = 0x204,
-            NotEqual = 0x205,
-            GreaterEqual = 0x206,
-            Always = 0x207,
+            Never = 0,
+            Less = 1,
+            Equal = 2,
+            LessEqual = 3,
+            Greater = 4,
+            NotEqual = 5,
+            GreaterEqual = 6,
+            Always = 7,
+        };
 
         struct Cull {
@@ -475,7 +475,7 @@ public:
 
             INSERT_PADDING_WORDS(0x8);
 
-            ComparisonOp depth_test_func;
+            BitField<0, 3, ComparisonOp> depth_test_func;
 
             INSERT_PADDING_WORDS(0xB);
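
Note: the fix works because the GL comparison defines (GL_NEVER = 0x200
through GL_ALWAYS = 0x207) already carry the function index in their low
3 bits, so masking the register value to bits [0, 3) decodes both
conventions identically. Below is a minimal standalone C++ sketch of
that decoding; DecodeDepthFunc is a hypothetical stand-in for the
BitField<0, 3, ComparisonOp> accessor used by the patch, not yuzu's
actual implementation.

    #include <cstdint>

    // Comparison functions as decoded after the patch; the values match
    // the low 3 bits of the corresponding GL defines (GL_NEVER = 0x200,
    // GL_LESS = 0x201, ..., GL_ALWAYS = 0x207).
    enum class ComparisonOp : std::uint32_t {
        Never = 0,
        Less = 1,
        Equal = 2,
        LessEqual = 3,
        Greater = 4,
        NotEqual = 5,
        GreaterEqual = 6,
        Always = 7,
    };

    // Hypothetical stand-in for BitField<0, 3, ComparisonOp>: keep only
    // bits [0, 3) of the raw register word.
    constexpr ComparisonOp DecodeDepthFunc(std::uint32_t raw) {
        return static_cast<ComparisonOp>(raw & 0x7u);
    }

    // A game writing the full GL define (GL_LESS = 0x201) and one
    // writing just the 3-bit index (1) now decode to the same function.
    static_assert(DecodeDepthFunc(0x201) == ComparisonOp::Less);
    static_assert(DecodeDepthFunc(0x001) == ComparisonOp::Less);
    static_assert(DecodeDepthFunc(0x207) == ComparisonOp::Always);
    static_assert(DecodeDepthFunc(0x007) == ComparisonOp::Always);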