JIT: simplify and optimize memcheck macros
Instead of jumping over the update code and the like, jump directly to the exception handler. This avoids redundant exception checks in cases where we can't use fastmem memory operations (e.g. paired loadstore).
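Roughly: the old MEMCHECK_START/MEMCHECK_END pair emitted a DSI test at each call site whose branch merely skipped the register-update code, and DoJit then emitted a second DSI test to reach the block's exception handler. The new MemoryExceptionCheck() helper emits the test once and records the forward branch in js.exceptionHandler, which DoJit later fixes up to point straight at that handler. A rough sketch of a store-with-update call site (illustrative only, built from names that appear in this diff; not compilable on its own):

    // Old: the branch only skips the update; a second EXCEPTION_DSI test still follows in DoJit.
    MEMCHECK_START                            // TEST(Exceptions, EXCEPTION_DSI); memException = J_CC(CC_NZ)
    ADD(32, gpr.R(a), Imm32((u32)offset));    // register update, skipped if a DSI is pending
    MEMCHECK_END                              // SetJumpTarget(memException) -- falls through either way

    // New: one test, one branch, fixed up by DoJit to jump directly to the handler.
    MemoryExceptionCheck();                   // TEST once; js.exceptionHandler = J_CC(CC_NZ); js.fixupExceptionHandler = true
    ADD(32, gpr.R(a), Imm32((u32)offset));    // register update, only reached when no DSI is pending
    // ...later, in Jit64::DoJit():
    // SetJumpTarget(js.fixupExceptionHandler ? js.exceptionHandler : memException);

This matters most for operations that can't go through fastmem (e.g. paired loadstore), since those sites previously paid for both the per-site check and the block-level check.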
@@ -614,6 +614,7 @@ const u8* Jit64::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitBloc
 const GekkoOPInfo *opinfo = ops[i].opinfo;
 js.downcountAmount += opinfo->numCycles;
 js.fastmemLoadStore = NULL;
+js.fixupExceptionHandler = false;
 
 if (i == (code_block.m_num_instructions - 1))
 {
@@ -767,7 +768,9 @@ const u8* Jit64::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitBloc
 {
 // If we have a fastmem loadstore, we can omit the exception check and let fastmem handle it.
 FixupBranch memException;
-if (!js.fastmemLoadStore)
+_assert_msg_(DYNA_REC, !(js.fastmemLoadStore && js.fixupExceptionHandler),
+             "Fastmem loadstores shouldn't have exception handler fixups (PC=%x)!", ops[i].address);
+if (!js.fastmemLoadStore && !js.fixupExceptionHandler)
 {
 TEST(32, PPCSTATE(Exceptions), Imm32(EXCEPTION_DSI));
 memException = J_CC(CC_NZ, true);
@@ -777,7 +780,7 @@ const u8* Jit64::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitBloc
 if (!js.fastmemLoadStore)
 {
 exceptionHandlerAtLoc[js.fastmemLoadStore] = NULL;
-SetJumpTarget(memException);
+SetJumpTarget(js.fixupExceptionHandler ? js.exceptionHandler : memException);
 }
 else
 {
@@ -258,17 +258,15 @@ void Jit64::lXXx(UGeckoInstruction inst)
 
 if (update && storeAddress)
 {
-MEMCHECK_START
+MemoryExceptionCheck();
 MOV(32, gpr.R(a), opAddress);
-MEMCHECK_END
 }
 
 // TODO: support no-swap in SafeLoadToReg instead
 if (byte_reversed)
 {
-MEMCHECK_START
+MemoryExceptionCheck();
 BSWAP(accessSize, gpr.RX(d));
-MEMCHECK_END
 }
 
 gpr.UnlockAll();
@@ -372,9 +370,8 @@ void Jit64::stX(UGeckoInstruction inst)
 else
 {
 gpr.KillImmediate(a, true, true);
-MEMCHECK_START
+MemoryExceptionCheck();
 ADD(32, gpr.R(a), Imm32((u32)offset));
-MEMCHECK_END
 }
 }
 }
@@ -404,9 +401,8 @@ void Jit64::stX(UGeckoInstruction inst)
 
 if (update)
 {
-MEMCHECK_START
+MemoryExceptionCheck();
 ADD(32, gpr.R(a), Imm32((u32)offset));
-MEMCHECK_END
 }
 }
 gpr.UnlockAll();
@@ -485,9 +481,8 @@ void Jit64::stXx(UGeckoInstruction inst)
 
 if (update)
 {
-MEMCHECK_START
+MemoryExceptionCheck();
 MOV(32, gpr.R(a), R(RSCRATCH2));
-MEMCHECK_END;
 }
 
 gpr.UnlockAll();
@@ -72,7 +72,7 @@ void Jit64::lfXXX(UGeckoInstruction inst)
 registersInUse[RSCRATCH2] = true;
 SafeLoadToReg(RSCRATCH, addr, single ? 32 : 64, offset, registersInUse, false);
 
-MEMCHECK_START
+MemoryExceptionCheck();
 if (single)
 {
 ConvertSingleToDouble(fpr.RX(d), RSCRATCH, true);
@@ -84,7 +84,6 @@ void Jit64::lfXXX(UGeckoInstruction inst)
 }
 if (update && js.memcheck)
 MOV(32, gpr.R(a), addr);
-MEMCHECK_END
 fpr.UnlockAll();
 gpr.UnlockAll();
 }
@@ -141,9 +140,8 @@ void Jit64::stfXXX(UGeckoInstruction inst)
 else
 {
 gpr.KillImmediate(a, true, true);
-MEMCHECK_START
+MemoryExceptionCheck();
 ADD(32, gpr.R(a), Imm32((u32)imm));
-MEMCHECK_END
 }
 }
 fpr.UnlockAll();
@@ -187,9 +185,8 @@ void Jit64::stfXXX(UGeckoInstruction inst)
 
 if (update)
 {
-MEMCHECK_START
+MemoryExceptionCheck();
 MOV(32, gpr.R(a), R(RSCRATCH2));
-MEMCHECK_END
 }
 
 fpr.UnlockAll();
@@ -78,12 +78,11 @@ void Jit64::psq_stXX(UGeckoInstruction inst)
 
 if (update && js.memcheck)
 {
-MEMCHECK_START
+MemoryExceptionCheck();
 if (indexed)
 ADD(32, gpr.R(a), gpr.R(b));
 else
 ADD(32, gpr.R(a), Imm32((u32)offset));
-MEMCHECK_END
 }
 gpr.UnlockAll();
 gpr.UnlockAllX();
@@ -137,7 +136,7 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
 
 CALLptr(MScaled(RSCRATCH, SCALE_8, (u32)(u64)(&asm_routines.pairedLoadQuantized[w * 8])));
 
-MEMCHECK_START
+MemoryExceptionCheck();
 CVTPS2PD(fpr.RX(s), R(XMM0));
 if (update && js.memcheck)
 {
@@ -146,7 +145,6 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
 else
 ADD(32, gpr.R(a), Imm32((u32)offset));
 }
-MEMCHECK_END
 
 gpr.UnlockAll();
 gpr.UnlockAllX();
@@ -73,7 +73,12 @@ protected:
 int downcountAmount;
 u32 numLoadStoreInst;
 u32 numFloatingPointInst;
+// If this is set, we need to generate an exception handler for the fastmem load.
 u8* fastmemLoadStore;
+// If this is set, a load or store already prepared a jump to the exception handler for us,
+// so just fixup that branch instead of testing for a DSI again.
+bool fixupExceptionHandler;
+Gen::FixupBranch exceptionHandler;
 
 bool firstFPInstructionFound;
 bool isLastInstruction;
@@ -13,6 +13,16 @@
 
 using namespace Gen;
 
+void EmuCodeBlock::MemoryExceptionCheck()
+{
+if (jit->js.memcheck && !jit->js.fastmemLoadStore && !jit->js.fixupExceptionHandler)
+{
+TEST(32, PPCSTATE(Exceptions), Gen::Imm32(EXCEPTION_DSI));
+jit->js.exceptionHandler = J_CC(Gen::CC_NZ, true);
+jit->js.fixupExceptionHandler = true;
+}
+}
+
 void EmuCodeBlock::LoadAndSwap(int size, Gen::X64Reg dst, const Gen::OpArg& src)
 {
 if (cpu_info.bMOVBE)
@@ -350,7 +360,7 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg & opAddress,
 }
 ABI_PopRegistersAndAdjustStack(registersInUse, 0);
 
-MEMCHECK_START
+MemoryExceptionCheck();
 if (signExtend && accessSize < 32)
 {
 // Need to sign extend values coming from the Read_U* functions.
@@ -360,7 +370,6 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg & opAddress,
 {
 MOVZX(64, accessSize, reg_value, R(ABI_RETURN));
 }
-MEMCHECK_END
 }
 }
 else
@@ -400,7 +409,7 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg & opAddress,
 }
 ABI_PopRegistersAndAdjustStack(registersInUse, rsp_alignment);
 
-MEMCHECK_START
+MemoryExceptionCheck();
 if (signExtend && accessSize < 32)
 {
 // Need to sign extend values coming from the Read_U* functions.
@@ -410,7 +419,6 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg & opAddress,
 {
 MOVZX(64, accessSize, reg_value, R(ABI_RETURN));
 }
-MEMCHECK_END
 
 if (farcode.Enabled())
 {
@@ -12,16 +12,6 @@
 
 namespace MMIO { class Mapping; }
 
-#define MEMCHECK_START \
-Gen::FixupBranch memException; \
-if (jit->js.memcheck && !jit->js.fastmemLoadStore) \
-{ TEST(32, PPCSTATE(Exceptions), Gen::Imm32(EXCEPTION_DSI)); \
-memException = J_CC(Gen::CC_NZ, true); }
-
-#define MEMCHECK_END \
-if (jit->js.memcheck && !jit->js.fastmemLoadStore) \
-SetJumpTarget(memException);
-
 // We offset by 0x80 because the range of one byte memory offsets is
 // -0x80..0x7f.
 #define PPCSTATE(x) MDisp(RPPCSTATE, \
@@ -59,6 +49,8 @@ public:
 FarCodeCache farcode;
 u8* nearcode; // Backed up when we switch to far code.
 
+void MemoryExceptionCheck();
+
 // Simple functions to switch between near and far code emitting
 void SwitchToFarCode()
 {