Merge pull request #11958 from JosJuice/jitarm64-dispatcher-microopt

JitArm64: Dispatcher optimizations
Commit 89963c287c, authored by Mai on 2023-11-29 16:54:09 -05:00, committed by GitHub.
3 changed files with 50 additions and 50 deletions

Source/Core/Core/PowerPC/Jit64/JitAsm.cpp

@@ -19,11 +19,6 @@
 #include "Core/System.h"

 using namespace Gen;

-// These need to be next of each other so that the assembly
-// code can compare them easily.
-static_assert(offsetof(JitBlockData, effectiveAddress) + 4 == offsetof(JitBlockData, msrBits));
-
 Jit64AsmRoutineManager::Jit64AsmRoutineManager(Jit64& jit) : CommonAsmRoutines(jit)
 {
 }

@@ -168,12 +163,14 @@ void Jit64AsmRoutineManager::Generate()
   // Check block.msrBits.
   MOV(32, R(RSCRATCH2), PPCSTATE(msr));
   AND(32, R(RSCRATCH2), Imm32(JitBaseBlockCache::JIT_CACHE_MSR_MASK));
-  // Also check the block.effectiveAddress
-  SHL(64, R(RSCRATCH2), Imm8(32));
-  // RSCRATCH_EXTRA still has the PC.
+  // Also check the block.effectiveAddress. RSCRATCH_EXTRA still has the PC.
+  SHL(64, R(RSCRATCH_EXTRA), Imm8(32));
   OR(64, R(RSCRATCH2), R(RSCRATCH_EXTRA));
-  CMP(64, R(RSCRATCH2),
-      MDisp(RSCRATCH, static_cast<s32>(offsetof(JitBlockData, effectiveAddress))));
+  static_assert(offsetof(JitBlockData, msrBits) + 4 ==
+                offsetof(JitBlockData, effectiveAddress));
+  CMP(64, R(RSCRATCH2), MDisp(RSCRATCH, static_cast<s32>(offsetof(JitBlockData, msrBits))));
   state_mismatch = J_CC(CC_NE);

   // Success; branch to the block we found.
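
Because msrBits now sits four bytes before effectiveAddress (see the JitCache.h diff below), the x64 dispatcher can check both fields with a single 64-bit load and compare instead of two 32-bit ones. A minimal C++ sketch of the equivalent check, assuming the little-endian layout of x64; BlockHeader and BlockMatches are illustrative names, not Dolphin code:

```cpp
#include <cstdint>
#include <cstring>

struct BlockHeader  // illustrative stand-in for JitBlockData's first fields
{
  uint32_t msrBits;           // low half of the packed 64-bit word
  uint32_t effectiveAddress;  // high half of the packed 64-bit word
};

bool BlockMatches(const BlockHeader& b, uint32_t pc, uint32_t masked_msr)
{
  // Pack the expected values the way they sit in memory on a little-endian
  // target: msr in the low 32 bits, pc in the high 32 bits.
  const uint64_t expected = (uint64_t{pc} << 32) | masked_msr;
  uint64_t stored;
  std::memcpy(&stored, &b.msrBits, sizeof(stored));  // one 64-bit read
  return stored == expected;                         // one compare, one branch
}
```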

Source/Core/Core/PowerPC/JitArm64/JitAsm.cpp

@@ -41,17 +41,17 @@ void JitArm64::GenerateAsm()
   enter_code = GetCodePtr();

   ABI_PushRegisters(regs_to_save);
-  m_float_emit.ABI_PushRegisters(regs_to_save_fpr, ARM64Reg::X30);
+  m_float_emit.ABI_PushRegisters(regs_to_save_fpr, ARM64Reg::X8);

   MOVP2R(PPC_REG, &m_ppc_state);

   // Store the stack pointer, so we can reset it if the BLR optimization fails.
-  ADD(ARM64Reg::X0, ARM64Reg::SP, 0);
-  STR(IndexType::Unsigned, ARM64Reg::X0, PPC_REG, PPCSTATE_OFF(stored_stack_pointer));
+  ADD(ARM64Reg::X8, ARM64Reg::SP, 0);
+  STR(IndexType::Unsigned, ARM64Reg::X8, PPC_REG, PPCSTATE_OFF(stored_stack_pointer));

   // Push {nullptr; -1} as invalid destination on the stack.
-  MOVI2R(ARM64Reg::X0, 0xFFFF'FFFF'FFFF'FFFF);
-  STP(IndexType::Pre, ARM64Reg::ZR, ARM64Reg::X0, ARM64Reg::SP, -16);
+  MOVI2R(ARM64Reg::X8, 0xFFFF'FFFF'FFFF'FFFF);
+  STP(IndexType::Pre, ARM64Reg::ZR, ARM64Reg::X8, ARM64Reg::SP, -16);

   // The PC will be loaded into DISPATCHER_PC after the call to CoreTiming::Advance().
   // Advance() does an exception check so we don't know what PC to use until afterwards.
@@ -86,9 +86,9 @@ void JitArm64::GenerateAsm()
   FixupBranch debug_exit;
   if (enable_debugging)
   {
-    LDR(IndexType::Unsigned, ARM64Reg::W0, ARM64Reg::X0,
-        MOVPage2R(ARM64Reg::X0, cpu.GetStatePtr()));
-    debug_exit = CBNZ(ARM64Reg::W0);
+    LDR(IndexType::Unsigned, ARM64Reg::W8, ARM64Reg::X8,
+        MOVPage2R(ARM64Reg::X8, cpu.GetStatePtr()));
+    debug_exit = CBNZ(ARM64Reg::W8);
   }

   dispatcher_no_check = GetCodePtr();
@@ -100,9 +100,9 @@ void JitArm64::GenerateAsm()
   if (GetBlockCache()->GetEntryPoints())
   {
     // Check if there is a block
-    ARM64Reg pc_and_msr = ARM64Reg::X25;
-    ARM64Reg cache_base = ARM64Reg::X27;
-    ARM64Reg block = ARM64Reg::X30;
+    ARM64Reg pc_and_msr = ARM64Reg::X8;
+    ARM64Reg cache_base = ARM64Reg::X9;
+    ARM64Reg block = ARM64Reg::X10;
     LDR(IndexType::Unsigned, EncodeRegTo32(pc_and_msr), PPC_REG, PPCSTATE_OFF(msr));
     MOVP2R(cache_base, GetBlockCache()->GetEntryPoints());
     // The entry points map is indexed by ((msrBits << 26) | (address >> 2)).
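
As a reference for the comment above, the index computation in plain C++; this is a hypothetical helper, with the shift amounts taken directly from the comment and the inputs assumed to be pre-masked and word-aligned:

```cpp
#include <cstdint>

// Index into the entry points map, per the comment above:
// ((msrBits << 26) | (address >> 2)).  msr_bits is assumed to already be
// reduced to the JIT-relevant bits (see JIT_CACHE_MSR_MASK), and address
// is assumed to be 4-byte aligned.
uint64_t EntryPointIndex(uint32_t msr_bits, uint32_t address)
{
  return (uint64_t{msr_bits} << 26) | (address >> 2);
}
```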
@@ -115,37 +115,40 @@ void JitArm64::GenerateAsm()
   }
   else
   {
+    ARM64Reg pc_masked = ARM64Reg::W8;
+    ARM64Reg cache_base = ARM64Reg::X9;
+    ARM64Reg block = ARM64Reg::X10;
+    ARM64Reg pc = ARM64Reg::W11;
+    ARM64Reg msr = ARM64Reg::W12;
+    ARM64Reg msr2 = ARM64Reg::W13;
+    ARM64Reg entry = ARM64Reg::X14;
+
     // iCache[(address >> 2) & iCache_Mask];
-    ARM64Reg pc_masked = ARM64Reg::W25;
-    ARM64Reg cache_base = ARM64Reg::X27;
-    ARM64Reg block = ARM64Reg::X30;
-    ORR(pc_masked, ARM64Reg::WZR,
-        LogicalImm(JitBaseBlockCache::FAST_BLOCK_MAP_FALLBACK_MASK << 3, 32));
-    AND(pc_masked, pc_masked, DISPATCHER_PC, ArithOption(DISPATCHER_PC, ShiftType::LSL, 1));
     MOVP2R(cache_base, GetBlockCache()->GetFastBlockMapFallback());
-    LDR(block, cache_base, EncodeRegTo64(pc_masked));
+    UBFX(pc_masked, DISPATCHER_PC, 2,
+         MathUtil::IntLog2(JitBaseBlockCache::FAST_BLOCK_MAP_FALLBACK_ELEMENTS) - 2);
+    LDR(block, cache_base, ArithOption(EncodeRegTo64(pc_masked), true));
     FixupBranch not_found = CBZ(block);

     // b.effectiveAddress != addr || b.msrBits != msr
-    ARM64Reg pc_and_msr = ARM64Reg::W25;
-    ARM64Reg pc_and_msr2 = ARM64Reg::W24;
-    LDR(IndexType::Unsigned, pc_and_msr, block, offsetof(JitBlockData, effectiveAddress));
-    CMP(pc_and_msr, DISPATCHER_PC);
-    FixupBranch pc_missmatch = B(CC_NEQ);
+    static_assert(offsetof(JitBlockData, msrBits) + 4 ==
+                  offsetof(JitBlockData, effectiveAddress));
+    LDP(IndexType::Signed, msr, pc, block, offsetof(JitBlockData, msrBits));
+    LDR(IndexType::Unsigned, msr2, PPC_REG, PPCSTATE_OFF(msr));
+    CMP(pc, DISPATCHER_PC);
+    FixupBranch pc_mismatch = B(CC_NEQ);

-    LDR(IndexType::Unsigned, pc_and_msr2, PPC_REG, PPCSTATE_OFF(msr));
-    AND(pc_and_msr2, pc_and_msr2, LogicalImm(JitBaseBlockCache::JIT_CACHE_MSR_MASK, 32));
-    LDR(IndexType::Unsigned, pc_and_msr, block, offsetof(JitBlockData, msrBits));
-    CMP(pc_and_msr, pc_and_msr2);
-    FixupBranch msr_missmatch = B(CC_NEQ);
+    LDR(IndexType::Unsigned, entry, block, offsetof(JitBlockData, normalEntry));
+    AND(msr2, msr2, LogicalImm(JitBaseBlockCache::JIT_CACHE_MSR_MASK, 32));
+    CMP(msr, msr2);
+    FixupBranch msr_mismatch = B(CC_NEQ);

     // return blocks[block_num].normalEntry;
-    LDR(IndexType::Unsigned, block, block, offsetof(JitBlockData, normalEntry));
-    BR(block);
+    BR(entry);
     SetJumpTarget(not_found);
-    SetJumpTarget(pc_missmatch);
-    SetJumpTarget(msr_missmatch);
+    SetJumpTarget(pc_mismatch);
+    SetJumpTarget(msr_mismatch);
   }
 }
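
In C++ terms, the rewritten fallback path is equivalent to the following hypothetical sketch (the names, index width, and mask value are stand-ins, not Dolphin code): the UBFX folds the old shift-and-mask sequence into one instruction, the LDP fetches both compared fields in one load, and normalEntry is loaded before the MSR comparison resolves, presumably to hide load latency.

```cpp
#include <cstddef>
#include <cstdint>

struct Block  // stand-in for JitBlockData
{
  uint8_t* normalEntry;
  uint32_t msrBits;
  uint32_t effectiveAddress;
};

constexpr unsigned kIndexBits = 14;  // assumption; the emitted code derives this
                                     // from FAST_BLOCK_MAP_FALLBACK_ELEMENTS
constexpr uint32_t kMsrMask = 0x30;  // stand-in for JIT_CACHE_MSR_MASK

const uint8_t* Dispatch(Block* const* map, uint32_t pc, uint32_t msr)
{
  // Extract the index bits straight out of the word-aligned PC -- this is
  // what the single UBFX computes.
  const size_t index = (pc >> 2) & ((size_t{1} << kIndexBits) - 1);
  const Block* b = map[index];
  if (b == nullptr)
    return nullptr;  // not_found
  // The emitted code reads msrBits and effectiveAddress with one LDP.
  if (b->effectiveAddress != pc)
    return nullptr;  // pc_mismatch
  if (b->msrBits != (msr & kMsrMask))
    return nullptr;  // msr_mismatch
  return b->normalEntry;
}
```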
@@ -182,8 +185,8 @@ void JitArm64::GenerateAsm()
   // Check the state pointer to see if we are exiting
   // Gets checked on at the end of every slice
-  LDR(IndexType::Unsigned, ARM64Reg::W0, ARM64Reg::X0, MOVPage2R(ARM64Reg::X0, cpu.GetStatePtr()));
-  FixupBranch exit = CBNZ(ARM64Reg::W0);
+  LDR(IndexType::Unsigned, ARM64Reg::W8, ARM64Reg::X8, MOVPage2R(ARM64Reg::X8, cpu.GetStatePtr()));
+  FixupBranch exit = CBNZ(ARM64Reg::W8);

   SetJumpTarget(to_start_of_timing_slice);
   ABI_CallFunction(&CoreTiming::GlobalAdvance);
@@ -212,10 +215,10 @@ void JitArm64::GenerateAsm()
   // Reset the stack pointer, since the BLR optimization may have pushed things onto the stack
   // without popping them.
-  LDR(IndexType::Unsigned, ARM64Reg::X0, PPC_REG, PPCSTATE_OFF(stored_stack_pointer));
-  ADD(ARM64Reg::SP, ARM64Reg::X0, 0);
+  LDR(IndexType::Unsigned, ARM64Reg::X8, PPC_REG, PPCSTATE_OFF(stored_stack_pointer));
+  ADD(ARM64Reg::SP, ARM64Reg::X8, 0);

-  m_float_emit.ABI_PopRegisters(regs_to_save_fpr, ARM64Reg::X30);
+  m_float_emit.ABI_PopRegisters(regs_to_save_fpr, ARM64Reg::X8);
   ABI_PopRegisters(regs_to_save);
   RET(ARM64Reg::X30);

Source/Core/Core/PowerPC/JitCommon/JitCache.h

@@ -33,10 +33,10 @@ struct JitBlockData
   // The normal entry point for the block, returned by Dispatch().
   u8* normalEntry;
-  // The effective address (PC) for the beginning of the block.
-  u32 effectiveAddress;
   // The MSR bits expected for this block to be valid; see JIT_CACHE_MSR_MASK.
   u32 msrBits;
+  // The effective address (PC) for the beginning of the block.
+  u32 effectiveAddress;
   // The physical address of the code represented by this block.
   // Various maps in the cache are indexed by this (block_map
   // and valid_block in particular). This is useful because of
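
The point of the reorder: msrBits and effectiveAddress now form one contiguous, naturally aligned 64-bit region, so the x64 dispatcher can compare both with a single 64-bit CMP and the ARM64 dispatcher can fetch both with a single LDP. A sketch of the layout property this guarantees, using an illustrative stand-in type:

```cpp
#include <cstddef>
#include <cstdint>

struct JitBlockDataLayout  // illustrative stand-in for JitBlockData
{
  uint8_t* normalEntry;
  uint32_t msrBits;           // compared against ppcState.msr & JIT_CACHE_MSR_MASK
  uint32_t effectiveAddress;  // compared against the current PC
};

// The same adjacency guarantee the commit asserts in both dispatchers.
static_assert(offsetof(JitBlockDataLayout, msrBits) + 4 ==
              offsetof(JitBlockDataLayout, effectiveAddress));
```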