2013-04-09 13:31:46 +00:00
|
|
|
//platform-specific OpenGL headers and extension-function lookup.
//glGetProcAddress(name) resolves an extension entry point at runtime where the
//platform requires it; on OS X the core-profile prototypes come straight from
//<OpenGL/gl3.h>, so no lookup macro is defined there.
#if defined(PLATFORM_X)
  #include <GL/gl.h>
  #include <GL/glx.h>
  #define glGetProcAddress(name) (*glXGetProcAddress)((const GLubyte*)(name))
#elif defined(PLATFORM_MACOSX)
  #include <OpenGL/gl.h>
  #include <OpenGL/gl3.h>
#elif defined(PLATFORM_WINDOWS)
  #include <GL/gl.h>
  #include <GL/glext.h>
  #define glGetProcAddress(name) wglGetProcAddress(name)
#else
  #error "ruby::OpenGL: unsupported platform"
#endif
|
|
|
|
|
|
|
|
namespace ruby {
|
|
|
|
|
|
|
|
#include "bind.hpp"
|
|
|
|
#include "shaders.hpp"
|
|
|
|
#include "utility.hpp"
|
|
|
|
|
|
|
|
struct OpenGL;  //forward declaration: OpenGLProgram::bind/parse take an OpenGL* before the full definition appears below
|
|
|
|
|
|
|
|
struct OpenGLTexture {
|
|
|
|
GLuint texture = 0;
|
|
|
|
unsigned width = 0;
|
|
|
|
unsigned height = 0;
|
|
|
|
GLuint format = GL_RGBA8;
|
|
|
|
GLuint filter = GL_LINEAR;
|
|
|
|
GLuint wrap = GL_CLAMP_TO_BORDER;
|
|
|
|
};
|
|
|
|
|
|
|
|
struct OpenGLSurface : OpenGLTexture {
|
|
|
|
GLuint program = 0;
|
|
|
|
GLuint framebuffer = 0;
|
2013-04-14 08:52:47 +00:00
|
|
|
GLuint vao = 0;
|
|
|
|
GLuint vbo[3] = {0, 0, 0};
|
2013-04-09 13:31:46 +00:00
|
|
|
GLuint vertex = 0;
|
|
|
|
GLuint geometry = 0;
|
|
|
|
GLuint fragment = 0;
|
2013-05-02 11:25:45 +00:00
|
|
|
uint32_t* buffer = nullptr;
|
2013-04-09 13:31:46 +00:00
|
|
|
|
2013-04-14 08:52:47 +00:00
|
|
|
void allocate();
|
2013-04-09 13:31:46 +00:00
|
|
|
void size(unsigned width, unsigned height);
|
|
|
|
void release();
|
2013-04-14 08:52:47 +00:00
|
|
|
void render(unsigned sourceWidth, unsigned sourceHeight, unsigned targetWidth, unsigned targetHeight);
|
2013-04-09 13:31:46 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
struct OpenGLProgram : OpenGLSurface {
|
|
|
|
//configuration
|
|
|
|
unsigned phase = 0; //frame counter
|
|
|
|
unsigned modulo = 0; //frame counter modulus
|
|
|
|
unsigned absoluteWidth = 0;
|
|
|
|
unsigned absoluteHeight = 0;
|
|
|
|
double relativeWidth = 0;
|
|
|
|
double relativeHeight = 0;
|
2013-04-14 08:52:47 +00:00
|
|
|
vector<OpenGLTexture> pixmaps;
|
2013-04-09 13:31:46 +00:00
|
|
|
|
2013-05-02 11:25:45 +00:00
|
|
|
void bind(OpenGL* instance, const Markup::Node& node, const string& pathname);
|
Update to v093r02 release.
byuu says:
Changelog:
- nall: fixed major memory leak in string class
- ruby: video shaders support #define-based settings now
- phoenix/GTK+: support > 256x256 icons for window / task bar / alt-tab
- sfc: remove random/ and config/, merge into system/
- ethos: delete higan.png (48x48), replace with higan512.png (512x512)
as new higan.png
- ethos: default gamma to 100% (no color adjustment)
- ethos: use "Video Shaders/Display Emulation/" instead of "Video
Shaders/Emulation/"
- use g++ instead of g++-4.7 (g++ -v must be >= 4.7)
- use -std=c++11 instead of -std=gnu++11
- applied a few patches from Debian upstream to make their packaging job
easier
So because colors are normalized in GLSL, I won't be able to offer video
shaders absolute color literals. We will have to perform basic color
conversion inside the core.
As such, the current plan is to create some sort of Emulator::Settings
interface. With that, I'll connect an option for color correction, which
will be on by default. For FC/SFC, that will mean gamma correction
(darker / stronger colors), and for GB/GBC/GBA, it will mean simulating
the weird brightness levels of the displays. I am undecided on whether
to use pea soup green for the GB or not. By not doing so, it'll be
easier for the display emulation shader to do it.
2013-11-09 11:45:54 +00:00
|
|
|
void parse(OpenGL* instance, string& source);
|
2013-04-09 13:31:46 +00:00
|
|
|
void release();
|
|
|
|
};
|
|
|
|
|
|
|
|
struct OpenGL : OpenGLProgram {
|
|
|
|
vector<OpenGLProgram> programs;
|
Update to v093r01 release.
byuu says:
Changelog:
- added SA-1 MDR; fixes bug in SD Gundam G-Next where the main
battleship was unable to fire
- added out-of-the-box support for any BSD running Clang 3.3+ (FreeBSD
10+, notably)
- added new video shader, "Display Emulation", which changes the shader
based on the emulated system
- fixed the home button to go to your default library path
- phoenix: Windows port won't send onActivate unless an item is selected
(prevents crashing on pressing enter in file dialog)
- ruby: removed vec4 position from out Vertex {} (helps AMD cards)
- shaders: updated all shaders to use texture() instead of texture2D()
(helps AMD cards)
The "Display Emulation" option works like this: when selected, it tries
to load "<path>/Video Shaders/Emulation/<systemName>.shader/"; otherwise
it falls back to the blur shader. <path> is the usual (next to binary,
then in <config>/higan, then in /usr/share/higan, etc); and <systemName>
is "Famicom", "Super Famicom", "Game Boy", "Game Boy Color", "Game Boy
Advance"
To support BSD, I had to modify the $(platform) variable to
differentiate between Linux and BSD.
As such, the new $(platform) values are:
win -> windows
osx -> macosx
x -> linux or bsd
I am also checking uname -s instead of uname -a now. No reason to
potentially match the hostname to the wrong OS type.
2013-10-21 11:45:39 +00:00
|
|
|
vector<OpenGLTexture> frames;
|
|
|
|
struct History {
|
|
|
|
unsigned length = 0;
|
|
|
|
GLuint format = GL_RGBA8;
|
|
|
|
GLuint filter = GL_LINEAR;
|
|
|
|
GLuint wrap = GL_CLAMP_TO_BORDER;
|
|
|
|
} history;
|
2013-04-09 13:31:46 +00:00
|
|
|
|
|
|
|
GLuint inputFormat = GL_UNSIGNED_INT_8_8_8_8_REV;
|
|
|
|
unsigned outputWidth = 0;
|
|
|
|
unsigned outputHeight = 0;
|
|
|
|
|
Update to v093r02 release.
byuu says:
Changelog:
- nall: fixed major memory leak in string class
- ruby: video shaders support #define-based settings now
- phoenix/GTK+: support > 256x256 icons for window / task bar / alt-tab
- sfc: remove random/ and config/, merge into system/
- ethos: delete higan.png (48x48), replace with higan512.png (512x512)
as new higan.png
- ethos: default gamma to 100% (no color adjustment)
- ethos: use "Video Shaders/Display Emulation/" instead of "Video
Shaders/Emulation/"
- use g++ instead of g++-4.7 (g++ -v must be >= 4.7)
- use -std=c++11 instead of -std=gnu++11
- applied a few patches from Debian upstream to make their packaging job
easier
So because colors are normalized in GLSL, I won't be able to offer video
shaders absolute color literals. We will have to perform basic color
conversion inside the core.
As such, the current plan is to create some sort of Emulator::Settings
interface. With that, I'll connect an option for color correction, which
will be on by default. For FC/SFC, that will mean gamma correction
(darker / stronger colors), and for GB/GBC/GBA, it will mean simulating
the weird brightness levels of the displays. I am undecided on whether
to use pea soup green for the GB or not. By not doing so, it'll be
easier for the display emulation shader to do it.
2013-11-09 11:45:54 +00:00
|
|
|
struct Setting {
|
|
|
|
string name;
|
|
|
|
string value;
|
|
|
|
bool operator< (const Setting& source) { return name < source.name; }
|
|
|
|
bool operator==(const Setting& source) { return name == source.name; }
|
|
|
|
Setting() {}
|
|
|
|
Setting(const string& name) : name(name) {}
|
|
|
|
Setting(const string& name, const string& value) : name(name), value(value) {}
|
|
|
|
};
|
|
|
|
set<Setting> settings;
|
|
|
|
|
2013-05-02 11:25:45 +00:00
|
|
|
void shader(const char* pathname);
|
Update to v093r01 release.
byuu says:
Changelog:
- added SA-1 MDR; fixes bug in SD Gundam G-Next where the main
battleship was unable to fire
- added out-of-the-box support for any BSD running Clang 3.3+ (FreeBSD
10+, notably)
- added new video shader, "Display Emulation", which changes the shader
based on the emulated system
- fixed the home button to go to your default library path
- phoenix: Windows port won't send onActivate unless an item is selected
(prevents crashing on pressing enter in file dialog)
- ruby: removed vec4 position from out Vertex {} (helps AMD cards)
- shaders: updated all shaders to use texture() instead of texture2D()
(helps AMD cards)
The "Display Emulation" option works like this: when selected, it tries
to load "<path>/Video Shaders/Emulation/<systemName>.shader/"; otherwise
it falls back to the blur shader. <path> is the usual (next to binary,
then in <config>/higan, then in /usr/share/higan, etc); and <systemName>
is "Famicom", "Super Famicom", "Game Boy", "Game Boy Color", "Game Boy
Advance"
To support BSD, I had to modify the $(platform) variable to
differentiate between Linux and BSD.
As such, the new $(platform) values are:
win -> windows
osx -> macosx
x -> linux or bsd
I am also checking uname -s instead of uname -a now. No reason to
potentially match the hostname to the wrong OS type.
2013-10-21 11:45:39 +00:00
|
|
|
void bind(const Markup::Node& node, const string& pathname);
|
2013-05-02 11:25:45 +00:00
|
|
|
bool lock(uint32_t*& data, unsigned& pitch);
|
2013-04-09 13:31:46 +00:00
|
|
|
void clear();
|
|
|
|
void refresh();
|
|
|
|
bool init();
|
|
|
|
void term();
|
|
|
|
};
|
|
|
|
|
|
|
|
#include "surface.hpp"
|
|
|
|
#include "program.hpp"
|
|
|
|
#include "main.hpp"
|
|
|
|
|
|
|
|
}
|