NamcoGunCon: Use correct GPU clock as reference

Connor McLaughlin 2020-07-02 02:51:22 +10:00
parent 06b329b4c6
commit 8a8ce8d41c
3 changed files with 10 additions and 1 deletion


@@ -393,6 +393,11 @@ void GPU::DMAWrite(const u32* words, u32 word_count)
  * PAL - sysclk * 709379 / 451584
  */
+TickCount GPU::GetCRTCFrequency() const
+{
+  return m_console_is_pal ? 53203425 : 53693175;
+}
+
 TickCount GPU::CRTCTicksToSystemTicks(TickCount gpu_ticks, TickCount fractional_ticks) const
 {
   // convert to master clock, rounding up as we want to overshoot not undershoot
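A quick cross-check of those constants, not part of the commit: with the PlayStation master clock sysclk = 44100 * 768 = 33,868,800 Hz, the divisor 451584 goes in exactly 75 times, so the formula quoted in the comment gives 75 * 709379 = 53,203,425 for PAL, and the NTSC value 53,693,175 corresponds to a multiplier of 715909 by the same formula. A minimal C++ sketch of that arithmetic:

    // Sketch only (not from the commit): verifying the CRTC clock constants
    // against the formula "sysclk * multiplier / 451584".
    #include <cstdint>

    constexpr std::int64_t kSysClk = 44100LL * 768;  // 33868800 Hz, PS1 master clock
    static_assert(kSysClk % 451584 == 0, "451584 divides sysclk exactly (quotient 75)");
    static_assert((kSysClk / 451584) * 709379 == 53203425, "PAL CRTC clock");
    static_assert((kSysClk / 451584) * 715909 == 53693175, "NTSC CRTC clock");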


@@ -173,6 +173,9 @@ public:
   // Converts window coordinates into horizontal ticks and scanlines. Returns false if out of range. Used for lightguns.
   bool ConvertScreenCoordinatesToBeamTicksAndLines(s32 window_x, s32 window_y, u32* out_tick, u32* out_line) const;
+  // Returns the video clock frequency.
+  TickCount GetCRTCFrequency() const;
+
 protected:
   TickCount CRTCTicksToSystemTicks(TickCount crtc_ticks, TickCount fractional_ticks) const;
   TickCount SystemTicksToCRTCTicks(TickCount sysclk_ticks, TickCount* fractional_ticks) const;


@@ -169,7 +169,8 @@ void NamcoGunCon::UpdatePosition()
   }
   // 8MHz units for X = 44100*768*11/7 = 53222400 / 8000000 = 6.6528
-  m_position_x = static_cast<u16>(static_cast<float>(tick) * (1.0f / 6.6528f));
+  const double divider = static_cast<double>(m_system->GetGPU()->GetCRTCFrequency()) / 8000000.0;
+  m_position_x = static_cast<u16>(static_cast<float>(tick) / static_cast<float>(divider));
   m_position_y = static_cast<u16>(line);
   Log_DebugPrintf("Lightgun window coordinates %d,%d -> tick %u line %u 8mhz ticks %u", mouse_x, mouse_y, tick, line,
                   m_position_x);
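
For context, also not part of the commit: the old hard-coded divider came from 44100 * 768 * 11/7 = 53,222,400 Hz, which is roughly 0.9% below the NTSC CRTC clock returned by GetCRTCFrequency() and about 0.04% above the PAL one, which suggests NTSC lightgun X positions were slightly off before this change. A small sketch comparing the three dividers in 8 MHz GunCon units, under those assumptions:

    // Sketch only (not from the commit): divider implied by the old constant
    // versus the per-region CRTC clocks, expressed in 8 MHz GunCon units.
    #include <cstdio>

    int main()
    {
      const double old_divider = 53222400.0 / 8000000.0;   // 6.6528 (44100*768*11/7)
      const double ntsc_divider = 53693175.0 / 8000000.0;  // ~6.7116
      const double pal_divider = 53203425.0 / 8000000.0;   // ~6.6504
      std::printf("old=%.4f ntsc=%.4f pal=%.4f\n", old_divider, ntsc_divider, pal_divider);
      return 0;
    }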