From ff9231bb3470bf97fafe4e274634dd7df43165e2 Mon Sep 17 00:00:00 2001
From: rogerman
Date: Mon, 15 Jul 2024 23:09:53 -0700
Subject: [PATCH] OpenGL Renderer: 16-bit texture data that is converted to 32-bit is now byte-swapped correctly on big endian systems.

---
 desmume/src/OGLRender.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/desmume/src/OGLRender.cpp b/desmume/src/OGLRender.cpp
index b0ac1dcd0..16f394209 100644
--- a/desmume/src/OGLRender.cpp
+++ b/desmume/src/OGLRender.cpp
@@ -3942,7 +3942,7 @@ Render3DError OpenGLRenderer_1_2::UploadClearImage(const u16 *__restrict colorBu
 
 	if (OGL_TEXTURE_SRC_CI_COLOR == GL_UNSIGNED_BYTE)
 	{
-		ColorspaceConvertBuffer5551To8888(OGLRef.workingCIColorBuffer16, OGLRef.workingCIColorBuffer32, GPU_FRAMEBUFFER_NATIVE_WIDTH * GPU_FRAMEBUFFER_NATIVE_HEIGHT);
+		ColorspaceConvertBuffer5551To8888(OGLRef.workingCIColorBuffer16, OGLRef.workingCIColorBuffer32, GPU_FRAMEBUFFER_NATIVE_WIDTH * GPU_FRAMEBUFFER_NATIVE_HEIGHT);
 		glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, GPU_FRAMEBUFFER_NATIVE_WIDTH, GPU_FRAMEBUFFER_NATIVE_HEIGHT, GL_RGBA, OGL_TEXTURE_SRC_CI_COLOR, OGLRef.workingCIColorBuffer32);
 	}
 	else
@@ -4547,7 +4547,7 @@ Render3DError OpenGLRenderer_1_2::BeginRender(const GFX3D_State &renderState, co
 
 	if (OGL_TEXTURE_SRC_TOON_TABLE == GL_UNSIGNED_BYTE)
 	{
-		ColorspaceConvertBuffer5551To8888(renderState.toonTable16, OGLRef.toonTable32, 32);
+		ColorspaceConvertBuffer5551To8888(renderState.toonTable16, OGLRef.toonTable32, 32);
 		glTexSubImage1D(GL_TEXTURE_1D, 0, 0, 32, GL_RGBA, OGL_TEXTURE_SRC_TOON_TABLE, OGLRef.toonTable32);
 	}
 	else
@@ -5781,7 +5781,7 @@ Render3DError OpenGLRenderer_2_0::BeginRender(const GFX3D_State &renderState, co
 
 	if (OGL_TEXTURE_SRC_TOON_TABLE == GL_UNSIGNED_BYTE)
 	{
-		ColorspaceConvertBuffer5551To8888(renderState.toonTable16, OGLRef.toonTable32, 32);
+		ColorspaceConvertBuffer5551To8888(renderState.toonTable16, OGLRef.toonTable32, 32);
 		glTexSubImage1D(GL_TEXTURE_1D, 0, 0, 32, GL_RGBA, OGL_TEXTURE_SRC_TOON_TABLE, OGLRef.toonTable32);
 	}
 	else
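
Note: for context, the following is a minimal, self-contained sketch of the kind of conversion this patch concerns: expanding little-endian 16-bit RGBA5551 source values to 32-bit RGBA8888, byte-swapping each 16-bit input word on big-endian hosts before the channel bits are extracted. This is not DeSmuME's actual ColorspaceConvertBuffer5551To8888 implementation; the function names ConvertBuffer5551To8888_sketch and le16_to_host, and the endianness-detection macros used here, are illustrative assumptions.

#include <cstdint>
#include <cstddef>

// Hypothetical helper: interpret a 16-bit value stored little-endian on the
// current host. On big-endian hosts the two bytes must be swapped first.
static inline uint16_t le16_to_host(uint16_t v)
{
#if defined(__BIG_ENDIAN__) || (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
	return (uint16_t)((v << 8) | (v >> 8));
#else
	return v;
#endif
}

// Illustrative sketch of a 5551-to-8888 conversion loop (not the DeSmuME code).
static void ConvertBuffer5551To8888_sketch(const uint16_t *src, uint32_t *dst, size_t pixCount)
{
	for (size_t i = 0; i < pixCount; i++)
	{
		const uint16_t c = le16_to_host(src[i]);  // byte-swap on big-endian hosts
		const uint32_t r = ( c        & 0x1F);    // 5-bit red
		const uint32_t g = ((c >>  5) & 0x1F);    // 5-bit green
		const uint32_t b = ((c >> 10) & 0x1F);    // 5-bit blue
		const uint32_t a = ( c >> 15);            // 1-bit alpha

		// Expand each 5-bit channel to 8 bits and pack with red in the
		// low byte of the 32-bit value.
		dst[i] = ( (r << 3) | (r >> 2) )
		       | (((g << 3) | (g >> 2)) <<  8)
		       | (((b << 3) | (b >> 2)) << 16)
		       | ((a ? 0xFFu : 0x00u)   << 24);
	}
}

A call over a 32-entry toon table, for example, would look like ConvertBuffer5551To8888_sketch(toonTable16, toonTable32, 32).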