// SPDX-FileCopyrightText: 2002-2024 PCSX2 Dev Team
// SPDX-License-Identifier: GPL-3.0+

#pragma once

// Implementations found here: CALL and JMP! (unconditional only)

namespace x86Emitter
{

	extern void xJccKnownTarget(JccComparisonType comparison, const void* target, bool slideForward);

	// ------------------------------------------------------------------------
	struct xImpl_JmpCall
	{
		bool isJmp;

		void operator()(const xAddressReg& absreg) const;
		void operator()(const xIndirectNative& src) const;

		// Special form for calling functions. This form automatically resolves the
		// correct displacement based on the size of the instruction being generated.
		void operator()(const void* func) const
		{
			if (isJmp)
				xJccKnownTarget(Jcc_Unconditional, (const void*)(uptr)func, false); // double cast to/from (uptr) needed to appease GCC
			else
			{
				// Calls are relative to the instruction after this one, and the length is
				// always 5 bytes (16-bit calls are bad mojo, so we don't bother with special logic).
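				//
				// A quick worked example (hypothetical addresses, purely illustrative):
				// if xGetPtr() currently returns 0x00401000 and func is 0x00402000, then
				// dest = 0x00402000 - (0x00401000 + 5) = 0x00000FFB, which fits in the
				// signed 32-bit displacement of the E8 rel32 encoding emitted below.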

				sptr dest = (sptr)func - ((sptr)xGetPtr() + 5);
				pxAssertMsg(dest == (s32)dest, "Call target is too far away, must use a register!");
				xWrite8(0xe8);
				xWrite32(dest);
			}
		}
	};

	// Yes, this is awful: because the template code is in a header, we get a nice circular dependency.
	extern const xImpl_Mov xMOV;
	extern const xImpl_JmpCall xCALL;
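
	// Usage sketch (illustrative only; SomeHostFunction and the register choice are
	// hypothetical, and the emitter's write pointer is assumed to already point at a
	// writable code buffer, e.g. via xSetPtr):
	//
	//   xCALL((void*)&SomeHostFunction); // emits E8 rel32 when the target is within rel32 range
	//   xCALL(rax);                      // register-indirect form for far or runtime-resolved targets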

	struct xImpl_FastCall
	{
		// FIXME: the current 64-bit code is mostly a copy/paste; it might need to
		// push/pop some registers, but I think it is enough to handle the first call.

		void operator()(const void* f, const xRegister32& a1 = xEmptyReg, const xRegister32& a2 = xEmptyReg) const;

		void operator()(const void* f, u32 a1, const xRegister32& a2) const;
		void operator()(const void* f, const xIndirect32& a1) const;
		void operator()(const void* f, u32 a1, u32 a2) const;
		void operator()(const void* f, void* a1) const;

		void operator()(const void* f, const xRegisterLong& a1, const xRegisterLong& a2 = xEmptyReg) const;
		void operator()(const void* f, u32 a1, const xRegisterLong& a2) const;

		template <typename T>
		__fi void operator()(T* func, u32 a1, const xRegisterLong& a2 = xEmptyReg) const
		{
			(*this)((const void*)func, a1, a2);
		}

		template <typename T>
		__fi void operator()(T* func, const xIndirect32& a1) const
		{
			(*this)((const void*)func, a1);
		}

		template <typename T>
		__fi void operator()(T* func, u32 a1, u32 a2) const
		{
			(*this)((const void*)func, a1, a2);
		}

		void operator()(const xIndirectNative& f, const xRegisterLong& a1 = xEmptyReg, const xRegisterLong& a2 = xEmptyReg) const;
	};
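
	// Usage sketch (illustrative only): the global xImpl_FastCall instance is declared
	// elsewhere (assumed here to be named xFastCall); it loads the one or two arguments
	// into the platform's first argument registers and then emits the call, e.g.:
	//
	//   xFastCall((void*)&SomeHostHandler, ecx, edx); // hypothetical handler and registers
	//   xFastCall((void*)&SomeHostHandler, 0x1234);   // immediate first argument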

} // End namespace x86Emitter