From 307e0182af944549c82738e518392ab3d5ced553 Mon Sep 17 00:00:00 2001 From: zeromus Date: Wed, 3 Aug 2011 00:29:02 +0000 Subject: [PATCH] gpu: fix 2 years old regression in blending of backdrop --- desmume/src/GPU.cpp | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/desmume/src/GPU.cpp b/desmume/src/GPU.cpp index 9461f829a..c16984f69 100644 --- a/desmume/src/GPU.cpp +++ b/desmume/src/GPU.cpp @@ -2039,19 +2039,13 @@ static void GPU_RenderLine_layer(NDS_Screen * screen, u16 l) //this is currently eating up 2fps or so. it is a reasonable candidate for optimization. gpu->currBgNum = 5; switch(gpu->setFinalColorBck_funcNum) { - case 0: case 1: //for backdrops, (even with window enabled) none and blend are both the same: just copy the color + //for backdrops, effects aren't applied. + case 0: case 1: + case 2: case 3: memset_u16_le<256>(gpu->currDst,backdrop_color); break; - case 2: - //for non-windowed fade, we can just fade the color and fill - memset_u16_le<256>(gpu->currDst,gpu->currentFadeInColors[backdrop_color]); break; - case 3: - //likewise for non-windowed fadeout - memset_u16_le<256>(gpu->currDst,gpu->currentFadeOutColors[backdrop_color]); break; - //windowed fades need special treatment + //windowed cases apparently need special treatment? why? can we not render the backdrop? how would that even work? case 4: for(int x=0;x<256;x++) gpu->___setFinalColorBck(backdrop_color,x,1); break; case 5: for(int x=0;x<256;x++) gpu->___setFinalColorBck(backdrop_color,x,1); break; case 6: for(int x=0;x<256;x++) gpu->___setFinalColorBck(backdrop_color,x,1); break;