Merge pull request #6863 from booto/jit-ps-dsi

Prevent paired singles routines clobbering PC,SRR0
commit b4324847fb
Markus Wick 2018-05-15 11:07:21 +02:00, committed by GitHub
5 changed files with 35 additions and 11 deletions

View File

@@ -71,6 +71,8 @@ void Jit64::psq_stXX(UGeckoInstruction inst)
   }
   else
   {
+    // Stash PC in case asm_routine causes exception
+    MOV(32, PPCSTATE(pc), Imm32(g_jit->js.compilerPC));
     // We know what GQR is here, so we can load RSCRATCH2 and call into the store method directly
     // with just the scale bits.
     MOV(32, R(RSCRATCH2), Imm32(gqrValue & 0x3F00));
@@ -83,6 +85,8 @@ void Jit64::psq_stXX(UGeckoInstruction inst)
   }
   else
   {
+    // Stash PC in case asm_routine causes exception
+    MOV(32, PPCSTATE(pc), Imm32(g_jit->js.compilerPC));
     // Some games (e.g. Dirt 2) incorrectly set the unused bits which breaks the lookup table code.
     // Hence, we need to mask out the unused bits. The layout of the GQR register is
     // UU[SCALE]UUUUU[TYPE] where SCALE is 6 bits and TYPE is 3 bits, so we have to AND with
@@ -148,10 +152,11 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
   }
   else
   {
+    // Stash PC in case asm_routine causes exception
+    MOV(32, PPCSTATE(pc), Imm32(g_jit->js.compilerPC));
     // Get the high part of the GQR register
     OpArg gqr = PPCSTATE(spr[SPR_GQR0 + i]);
     gqr.AddMemOffset(2);
     MOV(32, R(RSCRATCH2), Imm32(0x3F07));
     AND(32, R(RSCRATCH2), gqr);
     LEA(64, RSCRATCH, M(w ? asm_routines.singleLoadQuantized : asm_routines.pairedLoadQuantized));
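
Why the new PC stash matters: the quantized load and store routines referenced here (pairedLoadQuantized, singleLoadQuantized, and the store equivalents) are emitted once and shared by every psq_l/psq_st call site, so when one of their memory accesses raises a DSI the exception path can only take the faulting instruction's address from ppcState.pc. Before this change it would pick up whatever stale value was left there, and that stale value ended up in SRR0. A minimal self-contained sketch of the idea follows; the names and the fault condition are invented for illustration and are not Dolphin's real emitter or state types.

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for the guest CPU state block (PPCSTATE in the JIT).
struct GuestState
{
  uint32_t pc = 0;    // guest program counter, read by the exception path
  uint32_t srr0 = 0;  // where the DSI handler places the faulting PC
};

static GuestState g_state;

// Stand-in for a shared routine such as pairedStoreQuantized: it is emitted
// once and called from many psq_st sites, so it cannot know its caller's PC.
static void SharedQuantizedStore(uint32_t address)
{
  const bool faulted = (address & 3) != 0;  // pretend misaligned stores fault
  if (faulted)
    g_state.srr0 = g_state.pc;  // the exception path trusts whatever pc holds
}

// What one emitted psq_st call site does after this change: stash its own PC
// (js.compilerPC in the real code) before jumping into the shared routine.
static void EmittedPsqStSite(uint32_t compiler_pc, uint32_t address)
{
  g_state.pc = compiler_pc;       // MOV(32, PPCSTATE(pc), Imm32(compilerPC))
  SharedQuantizedStore(address);  // CALLptr into the shared routine
}

int main()
{
  EmittedPsqStSite(0x80001234, 0x0C000001);  // misaligned, so the model "faults"
  std::printf("SRR0 = %08X\n", static_cast<unsigned>(g_state.srr0));  // 80001234, not stale
  return 0;
}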

View File

@@ -317,7 +317,8 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg& opAddress,
   bool slowmem = (flags & SAFE_LOADSTORE_FORCE_SLOWMEM) != 0;
   registersInUse[reg_value] = false;
-  if (g_jit->jo.fastmem && !(flags & SAFE_LOADSTORE_NO_FASTMEM) && !slowmem)
+  if (g_jit->jo.fastmem && !(flags & (SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_UPDATE_PC)) &&
+      !slowmem)
   {
     u8* backpatchStart = GetWritableCodePtr();
     MovInfo mov;
@@ -378,7 +379,11 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg& opAddress,
   }
   // Helps external systems know which instruction triggered the read.
-  MOV(32, PPCSTATE(pc), Imm32(g_jit->js.compilerPC));
+  // Invalid for calls from Jit64AsmCommon routines
+  if (!(flags & SAFE_LOADSTORE_NO_UPDATE_PC))
+  {
+    MOV(32, PPCSTATE(pc), Imm32(g_jit->js.compilerPC));
+  }
   size_t rsp_alignment = (flags & SAFE_LOADSTORE_NO_PROLOG) ? 8 : 0;
   ABI_PushRegistersAndAdjustStack(registersInUse, rsp_alignment);
@@ -483,7 +488,8 @@ void EmuCodeBlock::SafeWriteRegToReg(OpArg reg_value, X64Reg reg_addr, int acces
   // set the correct immediate format
   reg_value = FixImmediate(accessSize, reg_value);
-  if (g_jit->jo.fastmem && !(flags & SAFE_LOADSTORE_NO_FASTMEM) && !slowmem)
+  if (g_jit->jo.fastmem && !(flags & (SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_UPDATE_PC)) &&
+      !slowmem)
   {
     u8* backpatchStart = GetWritableCodePtr();
     MovInfo mov;
@@ -540,7 +546,11 @@ void EmuCodeBlock::SafeWriteRegToReg(OpArg reg_value, X64Reg reg_addr, int acces
   }
   // PC is used by memory watchpoints (if enabled) or to print accurate PC locations in debug logs
-  MOV(32, PPCSTATE(pc), Imm32(g_jit->js.compilerPC));
+  // Invalid for calls from Jit64AsmCommon routines
+  if (!(flags & SAFE_LOADSTORE_NO_UPDATE_PC))
+  {
+    MOV(32, PPCSTATE(pc), Imm32(g_jit->js.compilerPC));
+  }
   size_t rsp_alignment = (flags & SAFE_LOADSTORE_NO_PROLOG) ? 8 : 0;
   ABI_PushRegistersAndAdjustStack(registersInUse, rsp_alignment);
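
The new flag does two things in SafeLoadToReg and SafeWriteRegToReg: it keeps the access off the backpatchable fastmem path, which also relies on per-call-site information that the shared routines cannot supply, and it skips the PPCSTATE(pc) store, since g_jit->js.compilerPC is meaningless while the Jit64AsmCommon routines are being emitted. Below is a simplified, self-contained model of that control flow; the helper and the NO_FASTMEM value are illustrative, not the real EmuCodeBlock API.

#include <cstdint>

// FORCE_SLOWMEM and NO_UPDATE_PC use the values visible in the header diff
// below; NO_FASTMEM's value here is a placeholder.
enum : int
{
  SAFE_LOADSTORE_NO_FASTMEM = 1,
  SAFE_LOADSTORE_FORCE_SLOWMEM = 16,
  SAFE_LOADSTORE_NO_UPDATE_PC = 64,
};

struct JitContext
{
  bool fastmem = true;       // models g_jit->jo.fastmem
  uint32_t compiler_pc = 0;  // models g_jit->js.compilerPC (only valid per instruction)
  uint32_t ppcstate_pc = 0;  // models PPCSTATE(pc)
};

// Sketch of the decision the two helpers now make.
void EmitSafeAccess(JitContext& ctx, int flags)
{
  const bool slowmem = (flags & SAFE_LOADSTORE_FORCE_SLOWMEM) != 0;
  if (ctx.fastmem &&
      !(flags & (SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_UPDATE_PC)) && !slowmem)
  {
    // Fast path: emit a raw access that can be backpatched later. That is only
    // safe when there is a real, per-instruction compilerPC to associate with it.
    return;
  }

  // Slow path: call the C++ fallback. Record the guest PC for exception
  // handlers and watchpoints, but only if the caller actually has one.
  if (!(flags & SAFE_LOADSTORE_NO_UPDATE_PC))
    ctx.ppcstate_pc = ctx.compiler_pc;

  // ... the real code pushes registers and emits the fallback call here ...
}

int main()
{
  JitContext ctx;
  ctx.compiler_pc = 0x80001234;
  EmitSafeAccess(ctx, SAFE_LOADSTORE_NO_UPDATE_PC);  // shared-routine case: pc untouched
  EmitSafeAccess(ctx, 0);                            // normal case: pc gets stamped
  return 0;
}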

View File

@@ -77,6 +77,8 @@ public:
     // Force slowmem (used when generating fallbacks in trampolines)
     SAFE_LOADSTORE_FORCE_SLOWMEM = 16,
     SAFE_LOADSTORE_DR_ON = 32,
+    // Generated from a context that doesn't have the PC of the instruction that caused it
+    SAFE_LOADSTORE_NO_UPDATE_PC = 64,
   };
   void SafeLoadToReg(Gen::X64Reg reg_value, const Gen::OpArg& opAddress, int accessSize, s32 offset,

View File

@@ -419,8 +419,9 @@ void QuantizedMemoryRoutines::GenQuantizedStore(bool single, EQuantizeType type,
     }
   }
-  int flags =
-      isInline ? 0 : SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG | SAFE_LOADSTORE_DR_ON;
+  int flags = isInline ? 0 :
+      SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG |
+      SAFE_LOADSTORE_DR_ON | SAFE_LOADSTORE_NO_UPDATE_PC;
   if (!single)
     flags |= SAFE_LOADSTORE_NO_SWAP;
@@ -478,8 +479,9 @@ void QuantizedMemoryRoutines::GenQuantizedLoad(bool single, EQuantizeType type,
   if (g_jit->jo.memcheck)
   {
     BitSet32 regsToSave = QUANTIZED_REGS_TO_SAVE_LOAD;
-    int flags =
-        isInline ? 0 : SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG | SAFE_LOADSTORE_DR_ON;
+    int flags = isInline ? 0 :
+        SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG |
+        SAFE_LOADSTORE_DR_ON | SAFE_LOADSTORE_NO_UPDATE_PC;
     SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), size, 0, regsToSave, extend, flags);
     if (!single && (type == QUANTIZE_U8 || type == QUANTIZE_S8))
     {
@@ -604,8 +606,9 @@ void QuantizedMemoryRoutines::GenQuantizedLoadFloat(bool single, bool isInline)
   if (g_jit->jo.memcheck)
   {
     BitSet32 regsToSave = QUANTIZED_REGS_TO_SAVE;
-    int flags =
-        isInline ? 0 : SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG | SAFE_LOADSTORE_DR_ON;
+    int flags = isInline ? 0 :
+        SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG |
+        SAFE_LOADSTORE_DR_ON | SAFE_LOADSTORE_NO_UPDATE_PC;
     SafeLoadToReg(RSCRATCH_EXTRA, R(RSCRATCH_EXTRA), size, 0, regsToSave, extend, flags);
   }
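
These generators are used in two modes: inlined into a block while one particular psq_l or psq_st is being compiled (isInline is true, js.compilerPC is valid, so SafeLoadToReg's default behaviour is correct), and once at startup to build the shared routine tables (isInline is false, with no meaningful PC, no prolog, and no backpatch context). A compact sketch of the shared flag selection follows; the flag values not visible in the hunks above are placeholders.

// Sketch of the selection the three memcheck paths above now share: an inline
// copy belongs to one specific instruction, so the defaults are fine; the
// shared out-of-line routine must avoid fastmem, the prolog and the PC stamp.
enum : int
{
  SAFE_LOADSTORE_NO_SWAP = 1,     // placeholder value
  SAFE_LOADSTORE_NO_PROLOG = 2,   // placeholder value
  SAFE_LOADSTORE_NO_FASTMEM = 4,  // placeholder value
  SAFE_LOADSTORE_DR_ON = 32,
  SAFE_LOADSTORE_NO_UPDATE_PC = 64,
};

int QuantizedMemcheckFlags(bool is_inline, bool single)
{
  int flags = is_inline ? 0 :
      SAFE_LOADSTORE_NO_FASTMEM | SAFE_LOADSTORE_NO_PROLOG |
      SAFE_LOADSTORE_DR_ON | SAFE_LOADSTORE_NO_UPDATE_PC;
  if (!single)
    flags |= SAFE_LOADSTORE_NO_SWAP;  // mirrors the store generator above
  return flags;
}

int main()
{
  const int shared = QuantizedMemcheckFlags(/*is_inline=*/false, /*single=*/true);
  return (shared & SAFE_LOADSTORE_NO_UPDATE_PC) != 0 ? 0 : 1;  // sanity check
}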

View File

@@ -32,12 +32,14 @@ public:
   // Out: XMM0: Bottom two 32-bit slots hold the read value,
   // converted to a pair of floats.
   // Trashes: all three RSCRATCH
+  // Note: Store PC if this could cause an exception
   const u8** pairedLoadQuantized;

   // In: array index: GQR to use.
   // In: ECX: Address to read from.
   // Out: XMM0: Bottom 32-bit slot holds the read value.
   // Trashes: all three RSCRATCH
+  // Note: Store PC if this could cause an exception
   const u8** singleLoadQuantized;

   // In: array index: GQR to use.
@@ -45,10 +47,12 @@ public:
   // In: XMM0: Bottom two 32-bit slots hold the pair of floats to be written.
   // Out: Nothing.
   // Trashes: all three RSCRATCH
+  // Note: Store PC if this could cause an exception
   const u8** pairedStoreQuantized;

   // In: array index: GQR to use.
   // In: ECX: Address to write to.
   // In: XMM0: Bottom 32-bit slot holds the float to be written.
+  // Note: Store PC if this could cause an exception
   const u8** singleStoreQuantized;
 };
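
For context on these members: each pointer is the base of a table of generated entry points, indexed as described in the comments above, and the new notes make the caller contract explicit, namely that PC must be stored before dispatching into any entry that can fault. A rough stand-alone analogue using ordinary function pointers follows; the selector, table contents and fault rule are invented purely for illustration.

#include <array>
#include <cstdint>
#include <cstdio>

namespace
{
uint32_t g_guest_pc = 0;    // stands in for PPCSTATE(pc)
uint32_t g_guest_srr0 = 0;  // stands in for SRR0

using QuantizedLoadFn = void (*)(uint32_t address);

// In the real tables each entry is generated machine code; here they are
// ordinary functions, one of which pretends to fault.
void LoadFloatPair(uint32_t /*address*/) {}
void LoadU8PairMayFault(uint32_t address)
{
  if (address >= 0x0C000000)    // pretend this range faults
    g_guest_srr0 = g_guest_pc;  // only correct if the caller stored PC first
}

constexpr std::array<QuantizedLoadFn, 8> pairedLoadQuantizedModel = {
    LoadFloatPair, LoadU8PairMayFault, LoadFloatPair, LoadFloatPair,
    LoadFloatPair, LoadFloatPair,      LoadFloatPair, LoadFloatPair,
};
}  // namespace

int main()
{
  // Caller contract from the notes above: store PC, then dispatch by index.
  g_guest_pc = 0x80001234;
  const unsigned selector = 1;  // derived from a GQR in the real code
  pairedLoadQuantizedModel[selector](0x0C000000);
  std::printf("SRR0 = %08X\n", static_cast<unsigned>(g_guest_srr0));
  return 0;
}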