Automatically disable fastmem and enable memcheck when there are any watchpoints.
- Move JitState::memcheck to JitOptions because it's an option.
- Add JitOptions::fastmem; switch JIT code to checking that rather than bFastmem directly.
- Add JitBase::UpdateMemoryOptions(), which sets both JIT options (replacing the duplicate lines in Jit64 and JitIL that set memcheck from bMMU).
- (!) The ARM JITs both had some lines that checked js.memcheck despite it being uninitialized in their cases. I've added UpdateMemoryOptions to both. There is a chance this could make something slower compared to the old behavior if the uninitialized value happened to be nonzero... hdkr should check this.
- UpdateMemoryOptions forces jo.fastmem off and jo.memcheck on if there are any watchpoints set.
- Also call that function from ClearCache.
- Have MemChecks call ClearCache when the {first,last} watchpoint is {added,removed}.

Enabling jo.memcheck (bah, confusing names) is currently pointless because hitting a watchpoint does not interrupt the basic block. That will change in the next commit.
This commit is contained in:
parent 3499f2c2d0
commit b84f6a55ab
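In short, the watchpoint list now drives the two new JIT options. A condensed sketch of the new logic, pulled together from the diff below (this is an excerpt for orientation, not the full patch):

```cpp
// BreakPoints.cpp: adding or removing the first/last watchpoint flushes the
// JIT cache, which in turn re-evaluates the memory options.
void MemChecks::Add(const TMemCheck& _rMemoryCheck)
{
	bool had_any = HasAny();
	if (GetMemCheck(_rMemoryCheck.StartAddress) == nullptr)
		m_MemChecks.push_back(_rMemoryCheck);
	if (!had_any)
		jit->ClearCache();  // switch to watchpoint-compatible code
}

// JitBase.cpp: called from Init() and ClearCache() of every JIT backend.
// fastmem would bypass the watchpoint checks, so it is forced off;
// memcheck (the slow, checked memory path) is forced on.
void JitBase::UpdateMemoryOptions()
{
	bool any_watchpoints = PowerPC::memchecks.HasAny();
	jo.fastmem = SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem &&
	             !any_watchpoints;
	jo.memcheck = SConfig::GetInstance().m_LocalCoreStartupParameter.bMMU ||
	              any_watchpoints;
}
```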
@@ -165,8 +165,13 @@ void MemChecks::AddFromStrings(const TMemChecksStr& mcstrs)
 
 void MemChecks::Add(const TMemCheck& _rMemoryCheck)
 {
+	bool had_any = HasAny();
 	if (GetMemCheck(_rMemoryCheck.StartAddress) == nullptr)
 		m_MemChecks.push_back(_rMemoryCheck);
+	// If this is the first one, clear the JIT cache so it can switch to
+	// watchpoint-compatible code.
+	if (!had_any)
+		jit->ClearCache();
 }
 
 void MemChecks::Remove(u32 _Address)
@@ -179,6 +184,8 @@ void MemChecks::Remove(u32 _Address)
 			return;
 		}
 	}
+	if (!HasAny())
+		jit->ClearCache();
 }
 
 TMemCheck *MemChecks::GetMemCheck(u32 address)
@@ -105,6 +105,8 @@ public:
 	void Remove(u32 _Address);
 
 	void Clear() { m_MemChecks.clear(); }
+
+	bool HasAny() const { return !m_MemChecks.empty(); }
 };
 
 class Watches
@@ -235,10 +235,10 @@ void Clear()
 
 bool AreMemoryBreakpointsActivated()
 {
-#ifndef ENABLE_MEM_CHECK
-	return false;
-#else
+#ifdef ENABLE_MEM_CHECK
 	return true;
+#else
+	return false;
 #endif
 }
 
@@ -178,14 +178,14 @@ void Jit64::Init()
 
 	jo.optimizeGatherPipe = true;
 	jo.accurateSinglePrecision = true;
-	js.memcheck = SConfig::GetInstance().m_LocalCoreStartupParameter.bMMU;
+	UpdateMemoryOptions();
 	js.fastmemLoadStore = nullptr;
 	js.compilerPC = 0;
 
 	gpr.SetEmitter(this);
 	fpr.SetEmitter(this);
 
-	trampolines.Init(js.memcheck ? TRAMPOLINE_CODE_SIZE_MMU : TRAMPOLINE_CODE_SIZE);
+	trampolines.Init(jo.memcheck ? TRAMPOLINE_CODE_SIZE_MMU : TRAMPOLINE_CODE_SIZE);
 	AllocCodeSpace(CODE_SIZE);
 
 	// BLR optimization has the same consequences as block linking, as well as
@@ -202,7 +202,7 @@ void Jit64::Init()
 
 	// important: do this *after* generating the global asm routines, because we can't use farcode in them.
 	// it'll crash because the farcode functions get cleared on JIT clears.
-	farcode.Init(js.memcheck ? FARCODE_SIZE_MMU : FARCODE_SIZE);
+	farcode.Init(jo.memcheck ? FARCODE_SIZE_MMU : FARCODE_SIZE);
 
 	code_block.m_stats = &js.st;
 	code_block.m_gpa = &js.gpa;
@@ -216,6 +216,7 @@ void Jit64::ClearCache()
 	trampolines.ClearCodeSpace();
 	farcode.ClearCodeSpace();
 	ClearCodeSpace();
+	UpdateMemoryOptions();
 	m_clear_cache_asap = false;
 }
 
@@ -788,7 +789,7 @@ const u8* Jit64::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitBloc
 
 			Jit64Tables::CompileInstruction(ops[i]);
 
-			if (js.memcheck && (opinfo->flags & FL_LOADSTORE))
+			if (jo.memcheck && (opinfo->flags & FL_LOADSTORE))
 			{
 				// If we have a fastmem loadstore, we can omit the exception check and let fastmem handle it.
 				FixupBranch memException;
@@ -182,14 +182,14 @@ void Jit64::lXXx(UGeckoInstruction inst)
 	}
 	else
 	{
-		if ((inst.OPCD != 31) && gpr.R(a).IsImm() && !js.memcheck)
+		if ((inst.OPCD != 31) && gpr.R(a).IsImm() && !jo.memcheck)
 		{
 			u32 val = gpr.R(a).Imm32() + inst.SIMM_16;
 			opAddress = Imm32(val);
 			if (update)
 				gpr.SetImmediate32(a, val);
 		}
-		else if ((inst.OPCD == 31) && gpr.R(a).IsImm() && gpr.R(b).IsImm() && !js.memcheck)
+		else if ((inst.OPCD == 31) && gpr.R(a).IsImm() && gpr.R(b).IsImm() && !jo.memcheck)
 		{
 			u32 val = gpr.R(a).Imm32() + gpr.R(b).Imm32();
 			opAddress = Imm32(val);
@@ -206,7 +206,7 @@ void Jit64::lXXx(UGeckoInstruction inst)
 			offset = inst.OPCD == 31 ? gpr.R(b).SImm32() : (s32)inst.SIMM_16;
 			// Depending on whether we have an immediate and/or update, find the optimum way to calculate
 			// the load address.
-			if ((update || use_constant_offset) && !js.memcheck)
+			if ((update || use_constant_offset) && !jo.memcheck)
 			{
 				gpr.BindToRegister(a, true, update);
 				opAddress = gpr.R(a);
@@ -259,7 +259,7 @@ void Jit64::lXXx(UGeckoInstruction inst)
 	// clobber it, then restore the value in the exception path.
 	// TODO: no other load has to do this at the moment, since no other loads go directly to the
 	// target registers, but if that ever changes, we need to do it there too.
-	if (js.memcheck)
+	if (jo.memcheck)
 	{
 		gpr.StoreFromRegister(d);
 		js.revertGprLoad = d;
@@ -392,7 +392,7 @@ void Jit64::stX(UGeckoInstruction inst)
 		bool exception = WriteToConstAddress(accessSize, gpr.R(s), addr, CallerSavedRegistersInUse());
 		if (update)
 		{
-			if (!js.memcheck || !exception)
+			if (!jo.memcheck || !exception)
 			{
 				gpr.SetImmediate32(a, addr);
 			}
@@ -445,7 +445,7 @@ void Jit64::stXx(UGeckoInstruction inst)
 	int a = inst.RA, b = inst.RB, s = inst.RS;
 	bool update = !!(inst.SUBOP10 & 32);
 	bool byte_reverse = !!(inst.SUBOP10 & 512);
-	FALLBACK_IF(!a || (update && a == s) || (update && js.memcheck && a == b));
+	FALLBACK_IF(!a || (update && a == s) || (update && jo.memcheck && a == b));
 
 	gpr.Lock(a, b, s);
 
@@ -33,7 +33,7 @@ void Jit64::lfXXX(UGeckoInstruction inst)
 
 	s32 offset = 0;
 	OpArg addr = gpr.R(a);
-	if (update && js.memcheck)
+	if (update && jo.memcheck)
 	{
 		addr = R(RSCRATCH2);
 		MOV(32, addr, gpr.R(a));
@@ -66,14 +66,14 @@ void Jit64::lfXXX(UGeckoInstruction inst)
 	}
 
 	fpr.Lock(d);
-	if (js.memcheck && single)
+	if (jo.memcheck && single)
 	{
 		fpr.StoreFromRegister(d);
 		js.revertFprLoad = d;
 	}
 	fpr.BindToRegister(d, !single);
 	BitSet32 registersInUse = CallerSavedRegistersInUse();
-	if (update && js.memcheck)
+	if (update && jo.memcheck)
 		registersInUse[RSCRATCH2] = true;
 	SafeLoadToReg(RSCRATCH, addr, single ? 32 : 64, offset, registersInUse, false);
 
@@ -87,7 +87,7 @@ void Jit64::lfXXX(UGeckoInstruction inst)
 		MOVQ_xmm(XMM0, R(RSCRATCH));
 		MOVSD(fpr.RX(d), R(XMM0));
 	}
-	if (update && js.memcheck)
+	if (update && jo.memcheck)
 		MOV(32, gpr.R(a), addr);
 	fpr.UnlockAll();
 	gpr.UnlockAll();
@@ -108,7 +108,7 @@ void Jit64::stfXXX(UGeckoInstruction inst)
 	s32 imm = (s16)inst.SIMM_16;
 	int accessSize = single ? 32 : 64;
 
-	FALLBACK_IF(update && js.memcheck && a == b);
+	FALLBACK_IF(update && jo.memcheck && a == b);
 
 	if (single)
 	{
@@ -138,7 +138,7 @@ void Jit64::stfXXX(UGeckoInstruction inst)
 
 	if (update)
 	{
-		if (!js.memcheck || !exception)
+		if (!jo.memcheck || !exception)
 		{
 			gpr.SetImmediate32(a, addr);
 		}
@@ -40,7 +40,7 @@ void Jit64::psq_stXX(UGeckoInstruction inst)
 	X64Reg addr = gpr.RX(a);
 	// TODO: this is kind of ugly :/ we should probably create a universal load/store address calculation
 	// function that handles all these weird cases, e.g. how non-fastmem loadstores clobber addresses.
-	bool storeAddress = (update && js.memcheck) || !SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem;
+	bool storeAddress = (update && jo.memcheck) || !jo.fastmem;
 	if (storeAddress)
 	{
 		addr = RSCRATCH2;
@@ -118,7 +118,7 @@ void Jit64::psq_stXX(UGeckoInstruction inst)
 			ADD(32, R(RSCRATCH_EXTRA), Imm32((u32)offset));
 	}
 	// In memcheck mode, don't update the address until the exception check
-	if (update && !js.memcheck)
+	if (update && !jo.memcheck)
 		MOV(32, gpr.R(a), R(RSCRATCH_EXTRA));
 	// Some games (e.g. Dirt 2) incorrectly set the unused bits which breaks the lookup table code.
 	// Hence, we need to mask out the unused bits. The layout of the GQR register is
@@ -141,7 +141,7 @@ void Jit64::psq_stXX(UGeckoInstruction inst)
 		CALLptr(MScaled(RSCRATCH, SCALE_8, (u32)(u64)asm_routines.pairedStoreQuantized));
 	}
 
-	if (update && js.memcheck)
+	if (update && jo.memcheck)
 	{
 		MemoryExceptionCheck();
 		if (indexed)
@@ -174,7 +174,7 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
 	s32 loadOffset = 0;
 	gpr.BindToRegister(a, true, update);
 	X64Reg addr = gpr.RX(a);
-	if (update && js.memcheck)
+	if (update && jo.memcheck)
 	{
 		addr = RSCRATCH2;
 		MOV(32, R(addr), gpr.R(a));
@@ -209,7 +209,7 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
 	}
 
 	fpr.Lock(s);
-	if (js.memcheck)
+	if (jo.memcheck)
 	{
 		fpr.StoreFromRegister(s);
 		js.revertFprLoad = s;
@@ -217,7 +217,7 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
 	fpr.BindToRegister(s, false);
 
 	// Let's mirror the JitAsmCommon code and assume all non-MMU loads go to RAM.
-	if (!js.memcheck)
+	if (!jo.memcheck)
 	{
 		if (w)
 		{
@@ -295,7 +295,7 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
 			ADD(32, R(RSCRATCH_EXTRA), Imm32((u32)offset));
 	}
 	// In memcheck mode, don't update the address until the exception check
-	if (update && !js.memcheck)
+	if (update && !jo.memcheck)
 		MOV(32, gpr.R(a), R(RSCRATCH_EXTRA));
 	MOV(32, R(RSCRATCH2), Imm32(0x3F07));
 
@@ -310,7 +310,7 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
 
 	MemoryExceptionCheck();
 	CVTPS2PD(fpr.RX(s), R(XMM0));
-	if (update && js.memcheck)
+	if (update && jo.memcheck)
 	{
 		if (indexed)
 			ADD(32, gpr.R(a), gpr.R(b));
@@ -420,7 +420,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	// If we find something that actually does do this, maybe this should be changed. How
 	// much of a performance hit would it be?
 	const u8* loadPairedFloatTwo = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 	{
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 64, 0, QUANTIZED_REGS_TO_SAVE, false, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
 		ROL(64, R(RSCRATCH_EXTRA), Imm8(32));
@@ -440,7 +440,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	RET();
 
 	const u8* loadPairedFloatOne = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 	{
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 32, 0, QUANTIZED_REGS_TO_SAVE, false, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
 		MOVD_xmm(XMM0, R(RSCRATCH_EXTRA));
@@ -461,7 +461,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	RET();
 
 	const u8* loadPairedU8Two = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 	{
 		// TODO: Support not swapping in safeLoadToReg to avoid bswapping twice
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 16, 0, QUANTIZED_REGS_TO_SAVE_LOAD, false, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
@@ -489,7 +489,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	RET();
 
 	const u8* loadPairedU8One = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 8, 0, QUANTIZED_REGS_TO_SAVE_LOAD, false, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
 	else
 		UnsafeLoadRegToRegNoSwap(RSCRATCH_EXTRA, RSCRATCH_EXTRA, 8, 0); // RSCRATCH_EXTRA = 0x000000xx
@@ -500,7 +500,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	RET();
 
 	const u8* loadPairedS8Two = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 	{
 		// TODO: Support not swapping in safeLoadToReg to avoid bswapping twice
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 16, 0, QUANTIZED_REGS_TO_SAVE_LOAD, false, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
@@ -528,7 +528,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	RET();
 
 	const u8* loadPairedS8One = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 8, 0, QUANTIZED_REGS_TO_SAVE_LOAD, true, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
 	else
 		UnsafeLoadRegToRegNoSwap(RSCRATCH_EXTRA, RSCRATCH_EXTRA, 8, 0, true);
@@ -540,7 +540,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 
 	const u8* loadPairedU16Two = AlignCode4();
 	// TODO: Support not swapping in (un)safeLoadToReg to avoid bswapping twice
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 32, 0, QUANTIZED_REGS_TO_SAVE_LOAD, false, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
 	else
 		UnsafeLoadRegToReg(RSCRATCH_EXTRA, RSCRATCH_EXTRA, 32, 0, false);
@@ -562,7 +562,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	RET();
 
 	const u8* loadPairedU16One = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 16, 0, QUANTIZED_REGS_TO_SAVE_LOAD, false, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
 	else
 		UnsafeLoadRegToReg(RSCRATCH_EXTRA, RSCRATCH_EXTRA, 16, 0, false);
@@ -573,7 +573,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	RET();
 
 	const u8* loadPairedS16Two = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 32, 0, QUANTIZED_REGS_TO_SAVE_LOAD, false, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
 	else
 		UnsafeLoadRegToReg(RSCRATCH_EXTRA, RSCRATCH_EXTRA, 32, 0, false);
@@ -595,7 +595,7 @@ void CommonAsmRoutines::GenQuantizedLoads()
 	RET();
 
 	const u8* loadPairedS16One = AlignCode4();
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 		SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), 16, 0, QUANTIZED_REGS_TO_SAVE_LOAD, true, SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG);
 	else
 		UnsafeLoadRegToReg(RSCRATCH_EXTRA, RSCRATCH_EXTRA, 16, 0, true);
@@ -246,14 +246,14 @@ void JitIL::Init()
 
 	jo.optimizeGatherPipe = true;
 	jo.accurateSinglePrecision = false;
-	js.memcheck = SConfig::GetInstance().m_LocalCoreStartupParameter.bMMU;
+	UpdateMemoryOptions();
 
-	trampolines.Init(js.memcheck ? TRAMPOLINE_CODE_SIZE_MMU : TRAMPOLINE_CODE_SIZE);
+	trampolines.Init(jo.memcheck ? TRAMPOLINE_CODE_SIZE_MMU : TRAMPOLINE_CODE_SIZE);
 	AllocCodeSpace(CODE_SIZE);
 	blocks.Init();
 	asm_routines.Init(nullptr);
 
-	farcode.Init(js.memcheck ? FARCODE_SIZE_MMU : FARCODE_SIZE);
+	farcode.Init(jo.memcheck ? FARCODE_SIZE_MMU : FARCODE_SIZE);
 
 	code_block.m_stats = &js.st;
 	code_block.m_gpa = &js.gpa;
@@ -624,7 +624,7 @@ const u8* JitIL::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitBloc
 
 		if (!ops[i].skip)
 		{
-			if (js.memcheck && (opinfo->flags & FL_USE_FPU))
+			if (jo.memcheck && (opinfo->flags & FL_USE_FPU))
 			{
 				ibuild.EmitFPExceptionCheck(ibuild.EmitIntConst(ops[i].address));
 			}
@@ -644,7 +644,7 @@ const u8* JitIL::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitBloc
 
 			JitILTables::CompileInstruction(ops[i]);
 
-			if (js.memcheck && (opinfo->flags & FL_LOADSTORE))
+			if (jo.memcheck && (opinfo->flags & FL_LOADSTORE))
 			{
 				ibuild.EmitDSIExceptionCheck(ibuild.EmitIntConst(ops[i].address));
 			}
@@ -33,6 +33,7 @@ void JitArm::Init()
 	fpr.Init(this);
 	jo.enableBlocklink = true;
 	jo.optimizeGatherPipe = true;
+	UpdateMemoryOptions();
 
 	code_block.m_stats = &js.st;
 	code_block.m_gpa = &js.gpa;
@@ -45,6 +46,7 @@ void JitArm::ClearCache()
 {
 	ClearCodeSpace();
 	blocks.Clear();
+	UpdateMemoryOptions();
 }
 
 void JitArm::Shutdown()
@@ -467,7 +469,7 @@ const u8* JitArm::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitBlo
 
 		if (!ops[i].skip)
 		{
-			if (js.memcheck && (opinfo->flags & FL_USE_FPU))
+			if (jo.memcheck && (opinfo->flags & FL_USE_FPU))
 			{
 				// Don't do this yet
 				BKPT(0x7777);
@@ -480,7 +482,7 @@ const u8* JitArm::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitBlo
 			for (int j : ~ops[i].fprInUse)
 				fpr.StoreFromRegister(j);
 
-			if (js.memcheck && (opinfo->flags & FL_LOADSTORE))
+			if (jo.memcheck && (opinfo->flags & FL_LOADSTORE))
 			{
 				// Don't do this yet
 				BKPT(0x666);
@@ -148,7 +148,7 @@ void JitArm::SafeStoreFromReg(s32 dest, u32 value, s32 regOffset, int accessSize
 	else if (PowerPC::IsOptimizableRAMAddress(imm_addr))
 	{
 		MOVI2R(rA, imm_addr);
-		EmitBackpatchRoutine(this, flags, SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem, true, RS);
+		EmitBackpatchRoutine(this, flags, jo.fastmem, true, RS);
 	}
 	else
 	{
@@ -158,7 +158,7 @@ void JitArm::SafeStoreFromReg(s32 dest, u32 value, s32 regOffset, int accessSize
 	}
 	else
 	{
-		EmitBackpatchRoutine(this, flags, SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem, true, RS);
+		EmitBackpatchRoutine(this, flags, jo.fastmem, true, RS);
 	}
 
 }
@@ -351,7 +351,7 @@ void JitArm::SafeLoadToReg(ARMReg dest, s32 addr, s32 offsetReg, int accessSize,
 		flags |= BackPatchInfo::FLAG_EXTEND;
 
 	EmitBackpatchRoutine(this, flags,
-		SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
+		jo.fastmem,
 		true, dest);
 
 	if (update)
@@ -482,7 +482,7 @@ void JitArm::lmw(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(!SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem);
+	FALLBACK_IF(!jo.fastmem);
 
 	u32 a = inst.RA;
 	ARMReg rA = gpr.GetReg();
@@ -506,7 +506,7 @@ void JitArm::stmw(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(!SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem);
+	FALLBACK_IF(!jo.fastmem);
 
 	u32 a = inst.RA;
 	ARMReg rA = gpr.GetReg();
@@ -182,7 +182,7 @@ void JitArm::lfXX(UGeckoInstruction inst)
 		MOV(RA, addr);
 
 	EmitBackpatchRoutine(this, flags,
-		SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
+		jo.fastmem,
 		!(is_immediate && PowerPC::IsOptimizableRAMAddress(imm_addr)), v0, v1);
 
 	SetJumpTarget(DoNotLoad);
@@ -387,7 +387,7 @@ void JitArm::stfXX(UGeckoInstruction inst)
 	else if (PowerPC::IsOptimizableRAMAddress(imm_addr))
 	{
 		MOVI2R(addr, imm_addr);
-		EmitBackpatchRoutine(this, flags, SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem, false, v0);
+		EmitBackpatchRoutine(this, flags, jo.fastmem, false, v0);
 	}
 	else
 	{
@@ -397,7 +397,7 @@ void JitArm::stfXX(UGeckoInstruction inst)
 	}
 	else
 	{
-		EmitBackpatchRoutine(this, flags, SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem, true, v0);
+		EmitBackpatchRoutine(this, flags, jo.fastmem, true, v0);
 	}
 }
 
@@ -24,7 +24,7 @@ void JitArm::psq_l(UGeckoInstruction inst)
 	// R12 contains scale
 	// R11 contains type
 	// R10 is the ADDR
-	FALLBACK_IF(js.memcheck || !SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem);
+	FALLBACK_IF(jo.memcheck || !jo.fastmem);
 
 	bool update = inst.OPCD == 57;
 	s32 offset = inst.SIMM_12;
@@ -76,7 +76,7 @@ void JitArm::psq_lx(UGeckoInstruction inst)
 	// R12 contains scale
 	// R11 contains type
 	// R10 is the ADDR
-	FALLBACK_IF(js.memcheck || !SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem);
+	FALLBACK_IF(jo.memcheck || !jo.fastmem);
 
 	bool update = inst.SUBOP10 == 38;
 
@@ -127,7 +127,7 @@ void JitArm::psq_st(UGeckoInstruction inst)
 	// R12 contains scale
 	// R11 contains type
 	// R10 is the ADDR
-	FALLBACK_IF(js.memcheck || !SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem);
+	FALLBACK_IF(jo.memcheck || !jo.fastmem);
 
 	bool update = inst.OPCD == 61;
 	s32 offset = inst.SIMM_12;
@@ -179,7 +179,7 @@ void JitArm::psq_stx(UGeckoInstruction inst)
 	// R12 contains scale
 	// R11 contains type
 	// R10 is the ADDR
-	FALLBACK_IF(js.memcheck || !SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem);
+	FALLBACK_IF(jo.memcheck || !jo.fastmem);
 
 	bool update = inst.SUBOP10 == 39;
 
@@ -17,6 +17,7 @@ void JitArm64::Init()
 	AllocCodeSpace(CODE_SIZE);
 	jo.enableBlocklink = true;
 	jo.optimizeGatherPipe = true;
+	UpdateMemoryOptions();
 	gpr.Init(this);
 	fpr.Init(this);
 
@@ -34,6 +35,7 @@ void JitArm64::ClearCache()
 {
 	ClearCodeSpace();
 	blocks.Clear();
+	UpdateMemoryOptions();
 }
 
 void JitArm64::Shutdown()
@@ -295,7 +297,7 @@ const u8* JitArm64::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitB
 
 		if (!ops[i].skip)
 		{
-			if (js.memcheck && (opinfo->flags & FL_USE_FPU))
+			if (jo.memcheck && (opinfo->flags & FL_USE_FPU))
 			{
 				// Don't do this yet
 				BRK(0x7777);
@@ -309,7 +311,7 @@ const u8* JitArm64::DoJit(u32 em_address, PPCAnalyst::CodeBuffer *code_buf, JitB
 			for (int j : ~ops[i].fprInUse)
 				fpr.StoreRegister(j);
 
-			if (js.memcheck && (opinfo->flags & FL_LOADSTORE))
+			if (jo.memcheck && (opinfo->flags & FL_LOADSTORE))
 			{
 				// Don't do this yet
 				BRK(0x666);
@@ -172,8 +172,8 @@ void JitArm64::SafeLoadToReg(u32 dest, s32 addr, s32 offsetReg, u32 flags, s32 o
 		ABI_PushRegisters(regs_in_use);
 		m_float_emit.ABI_PushRegisters(fprs_in_use, X30);
 		EmitBackpatchRoutine(this, flags,
-			SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
-			SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
+			jo.fastmem,
+			jo.fastmem,
 			dest_reg, XA);
 		m_float_emit.ABI_PopRegisters(fprs_in_use, X30);
 		ABI_PopRegisters(regs_in_use);
@@ -323,8 +323,8 @@ void JitArm64::SafeStoreFromReg(s32 dest, u32 value, s32 regOffset, u32 flags, s
 		ABI_PushRegisters(regs_in_use);
 		m_float_emit.ABI_PushRegisters(fprs_in_use, X30);
 		EmitBackpatchRoutine(this, flags,
-			SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
-			SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
+			jo.fastmem,
+			jo.fastmem,
 			RS, XA);
 		m_float_emit.ABI_PopRegisters(fprs_in_use, X30);
 		ABI_PopRegisters(regs_in_use);
@@ -196,8 +196,8 @@ void JitArm64::lfXX(UGeckoInstruction inst)
 		ABI_PushRegisters(regs_in_use);
 		m_float_emit.ABI_PushRegisters(fprs_in_use, X30);
 		EmitBackpatchRoutine(this, flags,
-			SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
-			SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
+			jo.fastmem,
+			jo.fastmem,
 			VD, XA);
 		m_float_emit.ABI_PopRegisters(fprs_in_use, X30);
 		ABI_PopRegisters(regs_in_use);
@@ -426,8 +426,8 @@ void JitArm64::stfXX(UGeckoInstruction inst)
 		ABI_PushRegisters(regs_in_use);
 		m_float_emit.ABI_PushRegisters(fprs_in_use, X30);
 		EmitBackpatchRoutine(this, flags,
-			SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
-			SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem,
+			jo.fastmem,
+			jo.fastmem,
 			V0, XA);
 		m_float_emit.ABI_PopRegisters(fprs_in_use, X30);
 		ABI_PopRegisters(regs_in_use);
@@ -20,7 +20,7 @@ void JitArm64::psq_l(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStorePairedOff);
-	FALLBACK_IF(js.memcheck || !SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem);
+	FALLBACK_IF(jo.memcheck || !jo.fastmem);
 
 	// X30 is LR
 	// X0 contains the scale
@@ -83,7 +83,7 @@ void JitArm64::psq_st(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStorePairedOff);
-	FALLBACK_IF(js.memcheck || !SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem);
+	FALLBACK_IF(jo.memcheck || !jo.fastmem);
 
 	// X30 is LR
 	// X0 contains the scale
@@ -77,7 +77,7 @@ bool Jitx86Base::BackPatch(u32 emAddress, SContext* ctx)
 	BitSet32 registersInUse = it->second;
 
 	u8* exceptionHandler = nullptr;
-	if (jit->js.memcheck)
+	if (jit->jo.memcheck)
 	{
 		auto it2 = exceptionHandlerAtLoc.find(codePtr);
 		if (it2 != exceptionHandlerAtLoc.end())
@@ -82,3 +82,12 @@ bool JitBase::MergeAllowedNextInstructions(int count)
 	}
 	return true;
 }
+
+void JitBase::UpdateMemoryOptions()
+{
+	bool any_watchpoints = PowerPC::memchecks.HasAny();
+	jo.fastmem = SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem &&
+	             !any_watchpoints;
+	jo.memcheck = SConfig::GetInstance().m_LocalCoreStartupParameter.bMMU ||
+	              any_watchpoints;
+}
@@ -61,6 +61,8 @@ protected:
 	bool enableBlocklink;
 	bool optimizeGatherPipe;
 	bool accurateSinglePrecision;
+	bool fastmem;
+	bool memcheck;
 };
 struct JitState
 {
@@ -85,7 +87,6 @@ protected:
 	bool assumeNoPairedQuantize;
 	bool firstFPInstructionFound;
 	bool isLastInstruction;
-	bool memcheck;
 	int skipInstructions;
 	bool carryFlagSet;
 	bool carryFlagInverted;
@@ -109,6 +110,8 @@ protected:
 
 	bool MergeAllowedNextInstructions(int count);
 
+	void UpdateMemoryOptions();
+
 public:
 	// This should probably be removed from public:
 	JitOptions jo;
@@ -14,7 +14,7 @@ using namespace Gen;
 
 void EmuCodeBlock::MemoryExceptionCheck()
 {
-	if (jit->js.memcheck && !jit->js.fastmemLoadStore && !jit->js.fixupExceptionHandler)
+	if (jit->jo.memcheck && !jit->js.fastmemLoadStore && !jit->js.fixupExceptionHandler)
 	{
 		TEST(32, PPCSTATE(Exceptions), Gen::Imm32(EXCEPTION_DSI));
 		jit->js.exceptionHandler = J_CC(Gen::CC_NZ, true);
@@ -254,7 +254,7 @@ FixupBranch EmuCodeBlock::CheckIfSafeAddress(OpArg reg_value, X64Reg reg_addr, B
 	// assuming they'll never do an invalid memory access.
 	// The slightly more complex check needed for Wii games using the space just above MEM1 isn't
 	// implemented here yet, since there are no known working Wii MMU games to test it with.
-	if (jit->js.memcheck && !SConfig::GetInstance().m_LocalCoreStartupParameter.bWii)
+	if (jit->jo.memcheck && !SConfig::GetInstance().m_LocalCoreStartupParameter.bWii)
 	{
 		if (scratch == reg_addr)
 			PUSH(scratch);
@@ -276,7 +276,7 @@ FixupBranch EmuCodeBlock::CheckIfSafeAddress(OpArg reg_value, X64Reg reg_addr, B
 void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg & opAddress, int accessSize, s32 offset, BitSet32 registersInUse, bool signExtend, int flags)
 {
 	registersInUse[reg_value] = false;
-	if (SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem &&
+	if (jit->jo.fastmem &&
 	    !opAddress.IsImm() &&
 	    !(flags & (SAFE_LOADSTORE_NO_SWAP | SAFE_LOADSTORE_NO_FASTMEM))
 #ifdef ENABLE_MEM_CHECK
@@ -521,7 +521,7 @@ void EmuCodeBlock::SafeWriteRegToReg(OpArg reg_value, X64Reg reg_addr, int acces
 	reg_value = FixImmediate(accessSize, reg_value);
 
 	// TODO: support byte-swapped non-immediate fastmem stores
-	if (SConfig::GetInstance().m_LocalCoreStartupParameter.bFastmem &&
+	if (jit->jo.fastmem &&
 	    !(flags & SAFE_LOADSTORE_NO_FASTMEM) &&
 	    (reg_value.IsImm() || !(flags & SAFE_LOADSTORE_NO_SWAP))
 #ifdef ENABLE_MEM_CHECK
@@ -9,7 +9,7 @@ void JitILBase::lhax(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitLoadGReg(inst.RB);
 	if (inst.RA)
@@ -24,7 +24,7 @@ void JitILBase::lhaux(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitLoadGReg(inst.RB);
 	addr = ibuild.EmitAdd(addr, ibuild.EmitLoadGReg(inst.RA));
@@ -39,7 +39,7 @@ void JitILBase::lXz(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 	if (inst.RA)
@@ -101,7 +101,7 @@ void JitILBase::lha(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst((s32)(s16)inst.SIMM_16);
 
@@ -117,7 +117,7 @@ void JitILBase::lhau(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst((s32)inst.SIMM_16);
 
@@ -133,7 +133,7 @@ void JitILBase::lXzx(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitLoadGReg(inst.RB);
 
@@ -203,7 +203,7 @@ void JitILBase::stX(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 	IREmitter::InstLoc value = ibuild.EmitLoadGReg(inst.RS);
@@ -234,7 +234,7 @@ void JitILBase::stXx(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitLoadGReg(inst.RB);
 	IREmitter::InstLoc value = ibuild.EmitLoadGReg(inst.RS);
@@ -266,7 +266,7 @@ void JitILBase::lmw(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 
@@ -285,7 +285,7 @@ void JitILBase::stmw(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 
@@ -13,7 +13,7 @@ void JitILBase::lfs(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreFloatingOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 
@@ -28,7 +28,7 @@ void JitILBase::lfsu(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreFloatingOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 
@@ -43,7 +43,7 @@ void JitILBase::lfd(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreFloatingOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 
@@ -59,7 +59,7 @@ void JitILBase::lfdu(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreFloatingOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 
@@ -75,7 +75,7 @@ void JitILBase::stfd(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreFloatingOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 	IREmitter::InstLoc val = ibuild.EmitLoadFReg(inst.RS);
@@ -93,7 +93,7 @@ void JitILBase::stfs(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreFloatingOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_16);
 	IREmitter::InstLoc val = ibuild.EmitLoadFReg(inst.RS);
@@ -112,7 +112,7 @@ void JitILBase::stfsx(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreFloatingOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitLoadGReg(inst.RB);
 	IREmitter::InstLoc val = ibuild.EmitLoadFReg(inst.RS);
@@ -129,7 +129,7 @@ void JitILBase::lfsx(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStoreFloatingOff);
-	FALLBACK_IF(js.memcheck);
+	FALLBACK_IF(jo.memcheck);
 
 	IREmitter::InstLoc addr = ibuild.EmitLoadGReg(inst.RB), val;
 
@@ -9,7 +9,7 @@ void JitILBase::psq_st(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStorePairedOff);
-	FALLBACK_IF(js.memcheck || inst.W);
+	FALLBACK_IF(jo.memcheck || inst.W);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_12);
 	IREmitter::InstLoc val;
@@ -29,7 +29,7 @@ void JitILBase::psq_l(UGeckoInstruction inst)
 {
 	INSTRUCTION_START
 	JITDISABLE(bJITLoadStorePairedOff);
-	FALLBACK_IF(js.memcheck || inst.W);
+	FALLBACK_IF(jo.memcheck || inst.W);
 
 	IREmitter::InstLoc addr = ibuild.EmitIntConst(inst.SIMM_12);
 	IREmitter::InstLoc val;
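As a usage note, toggling a watchpoint from the debugger is now enough to switch code-generation modes. A hypothetical sequence illustrating the flow (only StartAddress appears in this diff; any other TMemCheck fields are assumptions for illustration):

```cpp
// Hypothetical frontend sequence; PowerPC::memchecks, MemChecks::Add/Remove,
// HasAny(), ClearCache() and UpdateMemoryOptions() all appear in the patch.
TMemCheck check;
check.StartAddress = 0x80001234;  // watch this guest address

PowerPC::memchecks.Add(check);
// HasAny() went false -> true, so MemChecks::Add() calls jit->ClearCache();
// ClearCache() calls UpdateMemoryOptions(), which clears jo.fastmem and sets
// jo.memcheck. Newly compiled blocks use the safe load/store paths.

PowerPC::memchecks.Remove(0x80001234);
// HasAny() went true -> false, so the cache is cleared again and fastmem
// comes back (if bFastmem is enabled in the config).
```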