OpenGL: Check the list of supported AA modes instead of hardcoding

Pokechu22 2023-03-19 22:20:34 -07:00
parent c63f0f37cd
commit b246a634d4
7 changed files with 133 additions and 20 deletions
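For context, the heart of the change is the glGetInternalformativ query pattern used to enumerate which MSAA sample counts the driver actually supports for a given internal format, instead of hardcoding a list. Below is a minimal standalone sketch of that pattern, assuming an OpenGL 4.2+ (or GLES 3.0+) context with a function loader already initialized; GL_RGBA8 is used purely as an illustrative format and is not what the commit queries.

#include <vector>
// GL types and entry points come from whatever loader header the project uses.

// Returns the driver-reported sample counts for one internal format, highest first.
std::vector<GLint> QuerySupportedSampleCounts(GLenum internal_format)
{
  // First ask how many distinct sample counts exist for this format/target combination.
  GLint num_sample_counts = 0;
  glGetInternalformativ(GL_TEXTURE_2D_MULTISAMPLE_ARRAY, internal_format, GL_NUM_SAMPLE_COUNTS, 1,
                        &num_sample_counts);

  // Then fetch the counts themselves; the spec returns them in descending order (e.g. 8, 4, 2).
  std::vector<GLint> sample_counts;
  if (num_sample_counts > 0)
  {
    sample_counts.resize(num_sample_counts);
    glGetInternalformativ(GL_TEXTURE_2D_MULTISAMPLE_ARRAY, internal_format, GL_SAMPLES,
                          num_sample_counts, sample_counts.data());
  }
  return sample_counts;
}

// Usage sketch (GL_RGBA8 is hypothetical; the diff below queries the EFB color and depth formats):
// std::vector<GLint> counts = QuerySupportedSampleCounts(GL_RGBA8);

The diff applies this idea to both the EFB color and depth formats and intersects the two lists.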

View File

@@ -1674,7 +1674,7 @@ const GLFunc gl_function_array[] = {
GLFUNC_REQUIRES(glDrawArraysInstancedBaseInstance, "VERSION_4_2"),
GLFUNC_REQUIRES(glDrawElementsInstancedBaseInstance, "VERSION_4_2"),
GLFUNC_REQUIRES(glDrawElementsInstancedBaseVertexBaseInstance, "VERSION_4_2"),
GLFUNC_REQUIRES(glGetInternalformativ, "VERSION_4_2"),
GLFUNC_REQUIRES(glGetInternalformativ, "VERSION_4_2 |VERSION_GLES_3"),
GLFUNC_REQUIRES(glGetActiveAtomicCounterBufferiv, "VERSION_4_2"),
GLFUNC_REQUIRES(glBindImageTexture, "VERSION_4_2"),
GLFUNC_REQUIRES(glMemoryBarrier, "VERSION_4_2"),

View File

@@ -3,6 +3,11 @@
#include "VideoBackends/OGL/OGLConfig.h"
#include <cstdio>
#include <string>
#include <string_view>
#include "Common/Assert.h"
#include "Common/GL/GLContext.h"
#include "Common/GL/GLExtensions/GLExtensions.h"
#include "Common/Logging/LogManager.h"
@@ -10,14 +15,13 @@
#include "Core/Config/GraphicsSettings.h"
#include "VideoBackends/OGL/OGLTexture.h"
#include "VideoCommon/DriverDetails.h"
#include "VideoCommon/FramebufferManager.h"
#include "VideoCommon/OnScreenDisplay.h"
#include "VideoCommon/VideoConfig.h"
#include <cstdio>
#include <string>
#include <string_view>
namespace OGL
{
void InitDriverInfo()
@@ -534,9 +538,118 @@ bool PopulateConfig(GLContext* m_main_gl_context)
g_Config.backend_info.bSupportsEarlyZ =
g_ogl_config.bSupportsImageLoadStore || g_ogl_config.bSupportsConservativeDepth;
glGetIntegerv(GL_MAX_SAMPLES, &g_ogl_config.max_samples);
if (g_ogl_config.max_samples < 1 || !g_ogl_config.bSupportsMSAA)
g_ogl_config.max_samples = 1;
g_Config.backend_info.AAModes.clear();
if (g_ogl_config.bSupportsMSAA)
{
bool supportsGetInternalFormat =
GLExtensions::Supports("VERSION_4_2") || GLExtensions::Supports("VERSION_GLES_3");
if (supportsGetInternalFormat)
{
// Note: GL_TEXTURE_2D_MULTISAMPLE_ARRAY_OES should technically be used for
// GL_OES_texture_storage_multisample_2d_array, but both are 0x9102 so it does not matter.
std::vector<int> color_aa_modes;
{
GLenum colorInternalFormat = OGLTexture::GetGLInternalFormatForTextureFormat(
FramebufferManager::GetEFBColorFormat(), true);
GLint num_color_sample_counts = 0;
glGetInternalformativ(GL_TEXTURE_2D_MULTISAMPLE_ARRAY, colorInternalFormat,
GL_NUM_SAMPLE_COUNTS, 1, &num_color_sample_counts);
ASSERT_MSG(VIDEO, num_color_sample_counts >= 0,
"negative GL_NUM_SAMPLE_COUNTS for colors does not make sense: {}",
num_color_sample_counts);
color_aa_modes.reserve(num_color_sample_counts + 1);
if (num_color_sample_counts > 0)
{
color_aa_modes.resize(num_color_sample_counts);
static_assert(sizeof(GLint) == sizeof(u32));
glGetInternalformativ(GL_TEXTURE_2D_MULTISAMPLE_ARRAY, colorInternalFormat, GL_SAMPLES,
num_color_sample_counts,
reinterpret_cast<GLint*>(color_aa_modes.data()));
ASSERT_MSG(VIDEO, std::is_sorted(color_aa_modes.rbegin(), color_aa_modes.rend()),
"GPU driver didn't return sorted color AA modes: [{}]",
fmt::join(color_aa_modes, ", "));
}
if (color_aa_modes.empty() || color_aa_modes.back() != 1)
color_aa_modes.push_back(1);
}
std::vector<int> depth_aa_modes;
{
GLenum depthInternalFormat = OGLTexture::GetGLInternalFormatForTextureFormat(
FramebufferManager::GetEFBDepthFormat(), true);
GLint num_depth_sample_counts = 0;
glGetInternalformativ(GL_TEXTURE_2D_MULTISAMPLE_ARRAY, depthInternalFormat,
GL_NUM_SAMPLE_COUNTS, 1, &num_depth_sample_counts);
ASSERT_MSG(VIDEO, num_depth_sample_counts >= 0,
"negative GL_NUM_SAMPLE_COUNTS for depth does not make sense: {}",
num_depth_sample_counts);
depth_aa_modes.reserve(num_depth_sample_counts + 1);
if (num_depth_sample_counts > 0)
{
depth_aa_modes.resize(num_depth_sample_counts);
static_assert(sizeof(GLint) == sizeof(u32));
glGetInternalformativ(GL_TEXTURE_2D_MULTISAMPLE_ARRAY, depthInternalFormat, GL_SAMPLES,
num_depth_sample_counts,
reinterpret_cast<GLint*>(depth_aa_modes.data()));
ASSERT_MSG(VIDEO, std::is_sorted(depth_aa_modes.rbegin(), depth_aa_modes.rend()),
"GPU driver didn't return sorted depth AA modes: [{}]",
fmt::join(depth_aa_modes, ", "));
}
if (depth_aa_modes.empty() || depth_aa_modes.back() != 1)
depth_aa_modes.push_back(1);
}
// The spec says supported sample formats are returned in descending numeric order.
// It also says "Only positive values are returned", but does not specify whether 1 is
// included or not; it seems like NVIDIA and Intel GPUs do not include it.
// We've inserted 1 at the back of both if not present to handle this.
g_Config.backend_info.AAModes.clear();
g_Config.backend_info.AAModes.reserve(std::min(color_aa_modes.size(), depth_aa_modes.size()));
// We only want AA modes that are supported for both the color and depth textures. Probably
// the support is the same, though. rbegin/rend are used to swap the order ahead of time.
std::set_intersection(color_aa_modes.rbegin(), color_aa_modes.rend(), depth_aa_modes.rbegin(),
depth_aa_modes.rend(),
std::back_inserter(g_Config.backend_info.AAModes));
}
else
{
// The documentation for glGetInternalformativ says its value is at least the minimum of
// GL_MAX_SAMPLES, GL_MAX_COLOR_TEXTURE_SAMPLES, and GL_MAX_DEPTH_TEXTURE_SAMPLES (and
// GL_MAX_INTEGER_SAMPLES for integer textures, assumed not applicable here).
GLint max_color_samples = 0;
glGetIntegerv(GL_MAX_COLOR_TEXTURE_SAMPLES, &max_color_samples);
GLint max_depth_samples = 0;
glGetIntegerv(GL_MAX_DEPTH_TEXTURE_SAMPLES, &max_depth_samples);
// Note: The desktop OpenGL ref pages don't actually say that GL_MAX_SAMPLES is a valid
// parameter for glGetIntegerv (though the ES ones do). However, MAX_SAMPLES is
// referenced in the GL 3.1 spec and by GL_ARB_texture_multisample (which is written against
// the OpenGL 3.1 spec), so presumably it is valid.
GLint max_samples = 0;
glGetIntegerv(GL_MAX_SAMPLES, &max_samples);
u32 supported_max_samples =
static_cast<u32>(std::min({max_samples, max_color_samples, max_depth_samples}));
while (supported_max_samples > 1)
{
g_Config.backend_info.AAModes.push_back(supported_max_samples);
supported_max_samples /= 2;
}
g_Config.backend_info.AAModes.push_back(1);
// The UI wants ascending order
std::reverse(g_Config.backend_info.AAModes.begin(), g_Config.backend_info.AAModes.end());
}
}
else
{
g_Config.backend_info.AAModes = {1};
}
const bool bSupportsIsHelperInvocation = g_ogl_config.bIsES ?
g_ogl_config.eSupportedGLSLVersion >= GlslEs320 :
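A side note on the std::set_intersection call in the hunk above: both driver-reported lists arrive in descending order, while backend_info.AAModes is consumed in ascending order, so reverse iterators are used to present both inputs as ascending ranges and the intersection comes out ascending as well. A small self-contained illustration with made-up sample counts (not taken from any real driver):

#include <algorithm>
#include <iterator>
#include <vector>

int main()
{
  // Hypothetical descending lists, in the order glGetInternalformativ reports them.
  const std::vector<int> color_modes = {8, 4, 2, 1};
  const std::vector<int> depth_modes = {16, 8, 4, 1};

  // set_intersection needs both ranges sorted by the same comparison; walking the descending
  // vectors backwards yields ascending ranges, so the merged output is ascending too.
  std::vector<int> common;
  std::set_intersection(color_modes.rbegin(), color_modes.rend(), depth_modes.rbegin(),
                        depth_modes.rend(), std::back_inserter(common));
  // common == {1, 4, 8}
  return 0;
}

The fallback path behaves the same way numerically: a driver reporting 8 for GL_MAX_SAMPLES (and the per-target limits) produces {8, 4, 2}, gets 1 appended, and is reversed to {1, 2, 4, 8} for the UI.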

View File

@@ -81,8 +81,6 @@ struct VideoConfig
const char* gl_vendor;
const char* gl_renderer;
const char* gl_version;
s32 max_samples;
};
void InitDriverInfo();

View File

@@ -146,12 +146,6 @@ OGLGfx::OGLGfx(std::unique_ptr<GLContext> main_gl_context, float backbuffer_scal
}
}
if (!PopulateConfig(m_main_gl_context.get()))
{
// Not all needed extensions are supported, so we have to stop here.
// Else some of the next calls might crash.
return;
}
InitDriverInfo();
// Setup Debug logging

View File

@@ -142,7 +142,7 @@ bool VideoBackend::FillBackendInfo(GLContext* context)
g_Config.backend_info.bSupportsGPUTextureDecoding = true;
g_Config.backend_info.bSupportsBBox = true;
// Overwritten in OGLRender.cpp later
// Overwritten in OGLConfig.cpp later
g_Config.backend_info.bSupportsDualSourceBlend = true;
g_Config.backend_info.bSupportsPrimitiveRestart = true;
g_Config.backend_info.bSupportsPaletteConversion = true;
@@ -182,6 +182,13 @@ bool VideoBackend::FillBackendInfo(GLContext* context)
return false;
}
if (!PopulateConfig(context))
{
// Not all needed extensions are supported, so we have to stop here.
// Else some of the next calls might crash.
return false;
}
// TODO: Move the remaining fields from the Renderer constructor here.
return true;
}

View File

@@ -15,9 +15,7 @@
namespace OGL
{
namespace
{
GLenum GetGLInternalFormatForTextureFormat(AbstractTextureFormat format, bool storage)
GLenum OGLTexture::GetGLInternalFormatForTextureFormat(AbstractTextureFormat format, bool storage)
{
switch (format)
{
@ -55,6 +53,8 @@ GLenum GetGLInternalFormatForTextureFormat(AbstractTextureFormat format, bool st
}
}
namespace
{
GLenum GetGLFormatForTextureFormat(AbstractTextureFormat format)
{
switch (format)

View File

@@ -36,6 +36,7 @@ public:
{
return IsMultisampled() ? GL_TEXTURE_2D_MULTISAMPLE_ARRAY : GL_TEXTURE_2D_ARRAY;
}
static GLenum GetGLInternalFormatForTextureFormat(AbstractTextureFormat format, bool storage);
GLenum GetGLFormatForImageTexture() const;
private:
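
Since GetGLInternalFormatForTextureFormat is now a public static member rather than a free function in OGLTexture.cpp's anonymous namespace, code outside that file can map an AbstractTextureFormat to a GL internal format. A hedged usage sketch mirroring the new call sites in the OGLConfig.cpp hunk above:

// Resolve the EFB color and depth formats to sized GL internal formats for glGetInternalformativ.
const GLenum color_format = OGL::OGLTexture::GetGLInternalFormatForTextureFormat(
    FramebufferManager::GetEFBColorFormat(), /*storage=*/true);
const GLenum depth_format = OGL::OGLTexture::GetGLInternalFormatForTextureFormat(
    FramebufferManager::GetEFBDepthFormat(), /*storage=*/true);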