[GPU] Added janky(?) support for register: D1MODE_V_COUNTER

Put internal_display_resolution into graphics_system

Thanks Beeanyew for initial implementation
Gliniak 2024-08-31 23:37:21 +02:00
parent 4452e300ac
commit ed843f35f8
6 changed files with 50 additions and 42 deletions
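In short: the frame-limiter worker now advances D1MODE_V_COUNTER in the register file by one frame's worth of scanlines (the configured internal display height) per iteration, the hard-coded ReadRegister case for 0x194C is dropped so reads presumably fall through to the register-file path (0x194C is also added to the register table), and the resolution table plus its cvar move from the kernel video code into GraphicsSystem. A minimal, self-contained sketch of the mechanism follows; the names here are hypothetical and simplified, the real code is in the hunks below.

#include <cstdint>
#include <utility>
#include <vector>

// Abbreviated stand-in for internal_display_resolution_entries.
static const std::vector<std::pair<uint16_t, uint16_t>> kResolutions = {
    {640, 480}, {1280, 720}, {1920, 1080}};

std::pair<uint16_t, uint16_t> PickResolution(int32_t cvar_value) {
  // Out-of-range selections fall back to the default entry.
  if (static_cast<size_t>(cvar_value) >= kResolutions.size()) {
    return kResolutions[1];  // 1280x720
  }
  return kResolutions[cvar_value];
}

int main() {
  uint32_t v_counter = 0;  // stands in for XE_GPU_REG_D1MODE_V_COUNTER
  auto res = PickResolution(1);
  // Each frame-limiter iteration adds one frame's worth of lines, so a guest
  // reading 0x194C sees a running value instead of a constant 0x2D0.
  for (int frame = 0; frame < 3; ++frame) {
    v_counter += res.second;
  }
  return v_counter == 3u * res.second ? 0 : 1;
}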


@@ -13,6 +13,7 @@ project("xenia-cpu-ppc-tests")
"imgui",
"xenia-core",
"xenia-cpu",
"xenia-gpu",
"xenia-base",
"xenia-kernel",
"xenia-patcher",


@@ -9,6 +9,7 @@ test_suite("xenia-cpu-tests", project_root, ".", {
"xenia-base",
"xenia-core",
"xenia-cpu",
"xenia-gpu",
-- TODO(benvanik): cut these dependencies?
"xenia-kernel",


@@ -29,6 +29,29 @@
#include "xenia/ui/graphics_provider.h"
#include "xenia/ui/window.h"
#include "xenia/ui/windowed_app_context.h"
DEFINE_int32(internal_display_resolution, 8,
"Allow games that support different resolutions to be rendered "
"at a specific resolution.\n"
" 0=640x480\n"
" 1=640x576\n"
" 2=720x480\n"
" 3=720x576\n"
" 4=800x600\n"
" 5=848x480\n"
" 6=1024x768\n"
" 7=1152x864\n"
" 8=1280x720 (Default)\n"
" 9=1280x768\n"
" 10=1280x960\n"
" 11=1280x1024\n"
" 12=1360x768\n"
" 13=1440x900\n"
" 14=1680x1050\n"
" 15=1920x540\n"
" 16=1920x1080\n",
"Video");
DEFINE_bool(
store_shaders, true,
"Store shaders persistently and load them when loading games to avoid "
@@ -128,6 +151,9 @@ X_STATUS GraphicsSystem::Setup(cpu::Processor* processor,
const double duration_scalar = 0.90;
while (frame_limiter_worker_running_) {
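// Advance the guest-visible vertical counter by one frame's worth of lines
// (the configured internal display height) each limiter iteration.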
register_file()->values[XE_GPU_REG_D1MODE_V_COUNTER] +=
GetInternalDisplayResolution().second;
if (cvars::vsync) {
const uint64_t current_time = Clock::QueryGuestTickCount();
const uint64_t tick_freq = Clock::guest_tick_frequency();
@@ -246,8 +272,6 @@ uint32_t GraphicsSystem::ReadRegister(uint32_t addr) {
return 0x08100748;
case 0x0F01: // RB_BC_CONTROL
return 0x0000200E;
case 0x194C: // R500_D1MODE_V_COUNTER
return 0x000002D0;
case 0x1951: // interrupt status
return 1; // vblank
case 0x1961: // AVIVO_D1MODE_VIEWPORT_SIZE
@@ -381,5 +405,14 @@ bool GraphicsSystem::Restore(ByteStream* stream) {
return command_processor_->Restore(stream);
}
std::pair<uint16_t, uint16_t> GraphicsSystem::GetInternalDisplayResolution() {
// Out-of-range values (negatives wrap via the unsigned comparison) fall
// back to the default 1280x720 entry.
if (static_cast<size_t>(cvars::internal_display_resolution) >=
internal_display_resolution_entries.size()) {
return internal_display_resolution_entries[8];
}
return internal_display_resolution_entries
[cvars::internal_display_resolution];
}
} // namespace gpu
} // namespace xe


@@ -34,6 +34,13 @@ class Emulator;
namespace xe {
namespace gpu {
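// Resolution table indexed by the internal_display_resolution cvar (0-16);
// entry 8 (1280x720) is the default.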
static const std::vector<std::pair<uint16_t, uint16_t>>
internal_display_resolution_entries = {
{640, 480}, {640, 576}, {720, 480}, {720, 576}, {800, 600},
{848, 480}, {1024, 768}, {1152, 864}, {1280, 720}, {1280, 768},
{1280, 960}, {1280, 1024}, {1360, 768}, {1440, 900}, {1680, 1050},
{1920, 540}, {1920, 1080}};
class CommandProcessor;
class GraphicsSystem {
@@ -86,6 +93,8 @@ class GraphicsSystem {
bool Save(ByteStream* stream);
bool Restore(ByteStream* stream);
static std::pair<uint16_t, uint16_t> GetInternalDisplayResolution();
std::pair<uint32_t, uint32_t> GetScaledAspectRatio() const {
return {scaled_aspect_x_, scaled_aspect_y_};
};


@@ -479,6 +479,8 @@ XE_GPU_REGISTER(0x1927, kDword, DC_LUT_WRITE_EN_MASK)
// Default: 0x00000000.
XE_GPU_REGISTER(0x1930, kDword, DC_LUTA_CONTROL)
XE_GPU_REGISTER(0x194C, kDword, D1MODE_V_COUNTER)
XE_GPU_REGISTER(0x1961, kDword, AVIVO_D1MODE_VIEWPORT_SIZE)
XE_GPU_REGISTER(0x1964, kDword, AVIVO_D1SCL_SCALER_ENABLE)


@@ -20,28 +20,6 @@
#include "xenia/kernel/xboxkrnl/xboxkrnl_rtl.h"
#include "xenia/xbox.h"
DEFINE_int32(internal_display_resolution, 8,
"Allow game that support different resolutions to be rendered "
"in specific resolution.\n"
" 0=640x480\n"
" 1=640x576\n"
" 2=720x480\n"
" 3=720x576\n"
" 4=800x600\n"
" 5=848x480\n"
" 6=1024x768\n"
" 7=1152x864\n"
" 8=1280x720 (Default)\n"
" 9=1280x768\n"
" 10=1280x960\n"
" 11=1280x1024\n"
" 12=1360x768\n"
" 13=1440x900\n"
" 14=1680x1050\n"
" 15=1920x540\n"
" 16=1920x1080\n",
"Video");
DEFINE_int32(
video_standard, 1,
"Enables switching between different video signals.\n 1=NTSC\n "
@@ -66,22 +44,6 @@ DEFINE_double(kernel_display_gamma_power, 2.22222233,
"Display gamma to use with kernel_display_gamma_type 3.",
"Kernel");
static const std::vector<std::pair<uint16_t, uint16_t>>
internal_display_resolution_entries = {
{640, 480}, {640, 576}, {720, 480}, {720, 576}, {800, 600},
{848, 480}, {1024, 768}, {1152, 864}, {1280, 720}, {1280, 768},
{1280, 960}, {1280, 1024}, {1360, 768}, {1440, 900}, {1680, 1050},
{1920, 540}, {1920, 1080}};
std::pair<uint16_t, uint16_t> GetInternalDisplayResolution() {
if (cvars::internal_display_resolution >
internal_display_resolution_entries.size()) {
return internal_display_resolution_entries[8];
}
return internal_display_resolution_entries
[cvars::internal_display_resolution];
}
inline constexpr static uint32_t GetVideoStandard() {
if (cvars::video_standard < 1 || cvars::video_standard > 3) {
return 1;
@@ -110,7 +72,7 @@ static std::pair<uint32_t, uint32_t> CalculateScaledAspectRatio(uint32_t fb_x,
uint32_t display_x = dar.first;
uint32_t display_y = dar.second;
auto res = GetInternalDisplayResolution();
auto res = xe::gpu::GraphicsSystem::GetInternalDisplayResolution();
uint32_t res_x = res.first;
uint32_t res_y = res.second;
@@ -244,7 +206,7 @@ void VdQueryVideoMode(X_VIDEO_MODE* video_mode) {
// TODO(benvanik): get info from actual display.
std::memset(video_mode, 0, sizeof(X_VIDEO_MODE));
auto display_res = GetInternalDisplayResolution();
auto display_res = gpu::GraphicsSystem::GetInternalDisplayResolution();
video_mode->display_width = display_res.first;
video_mode->display_height = display_res.second;