GPU: Use only the least significant 3 bits when reading the depth test func.

Some games (and the nouveau driver) set the full OpenGL define value here, but others only seem to set the last 3 bits.
Subv 2018-07-02 21:06:36 -05:00
parent 65c664560c
commit 6e0eba9917
1 changed file with 9 additions and 9 deletions

@@ -281,14 +281,14 @@ public:
     };
     enum class ComparisonOp : u32 {
-        Never = 0x200,
-        Less = 0x201,
-        Equal = 0x202,
-        LessEqual = 0x203,
-        Greater = 0x204,
-        NotEqual = 0x205,
-        GreaterEqual = 0x206,
-        Always = 0x207,
+        Never = 0,
+        Less = 1,
+        Equal = 2,
+        LessEqual = 3,
+        Greater = 4,
+        NotEqual = 5,
+        GreaterEqual = 6,
+        Always = 7,
     };
     struct Cull {
@@ -475,7 +475,7 @@ public:
         INSERT_PADDING_WORDS(0x8);
-        ComparisonOp depth_test_func;
+        BitField<0, 3, ComparisonOp> depth_test_func;
         INSERT_PADDING_WORDS(0xB);
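
For context, a minimal standalone sketch (not part of the commit) of why reading only the low 3 bits makes both register encodings agree; DecodeDepthTestFunc is a hypothetical stand-in for the BitField<0, 3, ComparisonOp> accessor in the diff above:

#include <cassert>
#include <cstdint>

enum class ComparisonOp : std::uint32_t {
    Never = 0,
    Less = 1,
    Equal = 2,
    LessEqual = 3,
    Greater = 4,
    NotEqual = 5,
    GreaterEqual = 6,
    Always = 7,
};

// Hypothetical stand-in for BitField<0, 3, ComparisonOp>:
// keep only bits [0, 3) of the raw register value.
constexpr ComparisonOp DecodeDepthTestFunc(std::uint32_t raw) {
    return static_cast<ComparisonOp>(raw & 0x7u);
}

int main() {
    // A game writing the full GL define (GL_LESS == 0x0201)...
    assert(DecodeDepthTestFunc(0x201) == ComparisonOp::Less);
    // ...and a game writing just the raw 3-bit value decode identically.
    assert(DecodeDepthTestFunc(1) == ComparisonOp::Less);
    return 0;
}

The GL comparison defines (0x200-0x207) and the raw values (0-7) share the same low 3 bits, so masking to 3 bits handles either convention without caring which one the game uses.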