Acquire an OGL3.2 core profile when available

thelemonman 2014-04-23 18:14:20 +00:00
parent f4cf398962
commit 85c1c3c39d
5 changed files with 3301 additions and 7 deletions

View File

@@ -173,7 +173,7 @@ libdesmume_a_SOURCES += \
 endif
 if HAVE_GL
-libdesmume_a_SOURCES += OGLRender.cpp
+libdesmume_a_SOURCES += OGLRender.cpp OGLRender_3_2.cpp
 endif
 if HAVE_OPENAL

View File

@@ -39,8 +39,8 @@
 #define EXTERNOGLEXT(procPtr, func)
 #else
 #include <GL/gl.h>
-#include <GL/glcorearb.h>
 #include <GL/glx.h>
+#include "utils/glcorearb.h"
 #define OGLEXT(procPtr, func) procPtr func = NULL;
 #define INITOGLEXT(procPtr, func) func = (procPtr)glXGetProcAddress((const GLubyte *) #func);
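
For reference, here is what the dynamic-loading macros above expand to for one entry point. This is a sketch, not a line from the commit; glGenFramebuffers is only an illustrative choice, and any function typed in glcorearb.h is resolved the same way:

    // OGLEXT(PFNGLGENFRAMEBUFFERSPROC, glGenFramebuffers) defines the pointer:
    PFNGLGENFRAMEBUFFERSPROC glGenFramebuffers = NULL;

    // INITOGLEXT(PFNGLGENFRAMEBUFFERSPROC, glGenFramebuffers), run from an init
    // routine, resolves it at run time; note how # stringizes the function name:
    glGenFramebuffers = (PFNGLGENFRAMEBUFFERSPROC)glXGetProcAddress((const GLubyte *)"glGenFramebuffers");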

View File

@@ -22,15 +22,19 @@
 #include <stdio.h>
 #include <X11/Xlib.h>
 #include <GL/glx.h>
+#include <GL/glxext.h>
 #include "../OGLRender.h"
 static bool glx_beginOpenGL(void) { return 1; }
 static void glx_endOpenGL(void) { }
 static bool glx_init(void) { return true; }
+static int xerror_handler(Display *dpy, XErrorEvent *ev) { return 0; }
 static GLXContext ctx;
 static GLXPbuffer pbuf;
+typedef GLXContext (*wtf)(Display*, GLXFBConfig, GLXContext, Bool, const int*);
 int deinit_glx_3Demu(void)
 {
     Display *dpy = glXGetCurrentDisplay();
@@ -111,10 +115,32 @@ int init_glx_3Demu(void)
     // The first should match exactly, otherwise is the least wrong one
     pbuf = glXCreatePbuffer(dpy, cfg[0], (int *)&pbuf_attr);
-    XFree(cfg);
+    // Dynamic linking is a pain, sigh
+    OGLEXT(PFNGLXCREATECONTEXTATTRIBSARBPROC, glXCreateContextAttribsARB);
+    INITOGLEXT(PFNGLXCREATECONTEXTATTRIBSARBPROC, glXCreateContextAttribsARB);
+    // Try to get a 3.2 core profile context
+    if (glXCreateContextAttribsARB) {
+        const int ctx_attr[] = {
+            GLX_CONTEXT_PROFILE_MASK_ARB, GLX_CONTEXT_CORE_PROFILE_BIT_ARB,
+            GLX_CONTEXT_MAJOR_VERSION_ARB, 3,
+            GLX_CONTEXT_MINOR_VERSION_ARB, 2,
+            None
+        };
+        // This silly dance is needed because if Xorg can't acquire the context
+        // we asked for it will throw an error, which is caught by GTK X error
+        // handler and made fatal. The show must go on.
+        int (*old_handler)(Display*, XErrorEvent*) = XSetErrorHandler(&xerror_handler);
+        ctx = glXCreateContextAttribsARB(dpy, cfg[0], 0, true, ctx_attr);
+        XSetErrorHandler(old_handler);
+    }
+    // Something went wrong, try with a standard context
+    if (!ctx)
         ctx = glXCreateContext(dpy, vis, NULL, true);
+    XFree(cfg);
     if (!ctx)
         return false;
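
For readers who want the context-acquisition pattern above as one self-contained piece, here is a condensed sketch. The helper name, its parameters, and the XSync call are mine, not part of the commit; the attribute list, the temporary X error handler, and the legacy fallback mirror the hunk above:

    #include <X11/Xlib.h>
    #include <GL/glx.h>
    #include <GL/glxext.h>

    // Swallow the X protocol error Xorg raises when the requested version
    // is not supported, so GTK's fatal error handler never sees it.
    static int ignore_xerror(Display *dpy, XErrorEvent *ev) { return 0; }

    static GLXContext create_context_3_2_or_legacy(Display *dpy, GLXFBConfig fbconfig, XVisualInfo *vis)
    {
        GLXContext ctx = NULL;
        PFNGLXCREATECONTEXTATTRIBSARBPROC createContextAttribs =
            (PFNGLXCREATECONTEXTATTRIBSARBPROC)glXGetProcAddress(
                (const GLubyte *)"glXCreateContextAttribsARB");

        if (createContextAttribs) {
            const int ctx_attr[] = {
                GLX_CONTEXT_PROFILE_MASK_ARB, GLX_CONTEXT_CORE_PROFILE_BIT_ARB,
                GLX_CONTEXT_MAJOR_VERSION_ARB, 3,
                GLX_CONTEXT_MINOR_VERSION_ARB, 2,
                None
            };
            int (*old_handler)(Display *, XErrorEvent *) = XSetErrorHandler(&ignore_xerror);
            ctx = createContextAttribs(dpy, fbconfig, NULL, True, ctx_attr);
            XSync(dpy, False);              // flush so any failure error arrives now
            XSetErrorHandler(old_handler);  // restore the previous (GTK) handler
        }

        // Fall back to a legacy context if the 3.2 core request failed.
        if (!ctx)
            ctx = glXCreateContext(dpy, vis, NULL, True);

        return ctx;
    }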

View File

@@ -70,6 +70,7 @@
 #include <GL/gl.h>
 #include <GL/glu.h>
 #include "OGLRender.h"
+#include "OGLRender_3_2.h"
 #include "osmesa_3Demu.h"
 #include "glx_3Demu.h"
 #endif
@@ -593,10 +594,9 @@ NULL
 GPU3DInterface *core3DList[] = {
     &gpu3DNull,
-    &gpu3DRasterize
+    &gpu3DRasterize,
 #if defined(HAVE_LIBOSMESA) || defined(HAVE_GL_GLX)
-    ,
-    &gpu3Dgl
+    &gpu3Dgl,
 #endif
 };
@@ -3195,6 +3195,9 @@ common_gtk_main( class configured_features *my_config)
         gtk_toggle_action_set_active((GtkToggleAction *)action, FALSE);
     }
+    OGLLoadEntryPoints_3_2_Func = OGLLoadEntryPoints_3_2;
+    OGLCreateRenderer_3_2_Func = OGLCreateRenderer_3_2;
     //Set the 3D emulation to use
     unsigned core = my_config->engine_3d;
     // setup the gdk 3D emulation;
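
The two assignments above hand the 3.2 entry-point loader and renderer factory to the shared OpenGL renderer as function pointers, presumably so that OGLRender.cpp does not hard-link against OGLRender_3_2.cpp, which the Makefile change only builds when GL is available. A generic sketch of that hook-and-fallback pattern follows; the names and signatures are illustrative, not DeSmuME's actual declarations:

    typedef struct Renderer { int uses_core_profile; } Renderer;

    static Renderer legacy = { 0 };
    static Renderer core32 = { 1 };

    static Renderer *CreateLegacyRenderer(void) { return &legacy; }
    static Renderer *CreateRenderer_3_2(void)   { return &core32; }

    // The hook the shared code checks; NULL means "3.2 path not wired up".
    // Analogous to OGLCreateRenderer_3_2_Func in the hunk above.
    static Renderer *(*CreateRenderer_3_2_Hook)(void) = NULL;

    static Renderer *CreateBestRenderer(void)
    {
        if (CreateRenderer_3_2_Hook) {
            Renderer *r = CreateRenderer_3_2_Hook();  // try the 3.2 core path first
            if (r)
                return r;
        }
        return CreateLegacyRenderer();                // otherwise, legacy OpenGL
    }

    // Frontend start-up then mirrors the assignment in common_gtk_main():
    //     CreateRenderer_3_2_Hook = CreateRenderer_3_2;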

File diff suppressed because it is too large