ShaderGen: Don't emit integer outputs when logic op is unsupported

This may have been causing issues on D3D10-level hardware, where logic op is
not supported.
commit 57976c947b
parent 9a5c2119e5
Author: Stenzek
Date:   2018-05-26 00:09:29 +10:00

2 changed files with 6 additions and 4 deletions
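
On D3D, the backend_logic_op host-config bit reflects whether the driver/device
exposes D3D11.1 output-merger logic ops. As a rough sketch of how a backend
could detect this (the helper below is hypothetical, not Dolphin's actual
detection code; only the CheckFeatureSupport query itself is standard D3D11
API):

#include <d3d11_1.h>

// Hypothetical helper: asks the runtime whether the device/driver supports
// D3D11.1 output-merger logic ops. D3D10-level hardware reports FALSE here,
// which is why the shader generator must avoid emitting integer (uint)
// color outputs on such devices.
static bool SupportsLogicOp(ID3D11Device* device)
{
  D3D11_FEATURE_DATA_D3D11_OPTIONS options = {};
  const HRESULT hr = device->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS,
                                                 &options, sizeof(options));
  return SUCCEEDED(hr) && options.OutputMergerLogicOp != FALSE;
}

A device that fails such a check would leave backend_logic_op clear, which the
changes below now take into account.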


@@ -344,8 +344,9 @@ void ClearUnusedPixelShaderUidBits(APIType ApiType, const ShaderHostConfig& host
   pixel_shader_uid_data* uid_data = uid->GetUidData<pixel_shader_uid_data>();
   // OpenGL and Vulkan convert implicitly normalized color outputs to their uint representation.
-  // Therefore, it is not necessary to use a uint output on these backends.
-  if (ApiType != APIType::D3D)
+  // Therefore, it is not necessary to use a uint output on these backends. We also disable the
+  // uint output when logic op is not supported (i.e. driver/device does not support D3D11.1).
+  if (ApiType != APIType::D3D || !host_config.backend_logic_op)
     uid_data->uint_output = 0;
 }
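
For reference, the uint_output uid bit gates whether the generated pixel
shader declares an integer color output. A minimal sketch of the distinction,
assuming a typical Dolphin-style output name (ocol0); the helper is
illustrative, not the actual emitter code:

#include <string>

// Illustrative helper (not Dolphin's actual emitter): shows what the
// uint_output uid bit selects. The returned string would appear in the
// generated HLSL pixel shader's main() parameter list; the name ocol0 is
// an assumption for this sketch.
static std::string DeclarePixelOutput(bool uint_output)
{
  // An integer (uint4) render-target output is only useful when the
  // backend can apply logic ops to it; otherwise the implicitly
  // normalized float4 output is used.
  return uint_output ? "out uint4 ocol0 : SV_Target0"
                     : "out float4 ocol0 : SV_Target0";
}

With the change above, the float4 path is now also taken on D3D whenever logic
op support is missing, matching the OpenGL/Vulkan behavior.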


@@ -35,8 +35,9 @@ void ClearUnusedPixelShaderUidBits(APIType ApiType, const ShaderHostConfig& host
   pixel_ubershader_uid_data* uid_data = uid->GetUidData<pixel_ubershader_uid_data>();
   // OpenGL and Vulkan convert implicitly normalized color outputs to their uint representation.
-  // Therefore, it is not necessary to use a uint output on these backends.
-  if (ApiType != APIType::D3D)
+  // Therefore, it is not necessary to use a uint output on these backends. We also disable the
+  // uint output when logic op is not supported (i.e. driver/device does not support D3D11.1).
+  if (ApiType != APIType::D3D || !host_config.backend_logic_op)
     uid_data->uint_output = 0;
 }