Reorganize backpatching a bit. Untested on ARM.

Rather than the *MemTools.cpp files checking whether the address is in the
emulated range themselves (a check which, as of the next commit, doesn't
cover every kind of access the JIT might want to intercept) and doing PC
replacement, they now just pass the access address and context to
jit->HandleFault, which does the rest itself.
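
(Purely as an illustration, not part of the commit or of Dolphin: a minimal,
self-contained POSIX C++ sketch of that dispatch shape. The SIGSEGV handler
decides nothing itself; it forwards the fault address and context to a single
HandleFault() hook, which either fixes things up and returns true, or returns
false so the handler falls back to the default crash. The emulated_base
region, the mprotect "fix-up", and all names below are invented for the
example.)

// Illustrative only: a SIGSEGV handler that defers everything to one
// HandleFault() hook, mirroring the shape of the new jit->HandleFault path.
#include <signal.h>
#include <sys/mman.h>
#include <cstddef>
#include <cstdint>
#include <cstdio>

static uint8_t* emulated_base = nullptr;     // stand-in for Memory::base
static const size_t emulated_size = 4096;

// Stand-in for jit->HandleFault(): true means "the fault was ours and is fixed".
static bool HandleFault(uintptr_t access_address, void* /*ctx*/)
{
	uintptr_t base = (uintptr_t)emulated_base;
	if (access_address < base || access_address >= base + emulated_size)
		return false;                        // not an emulated-memory access
	// "Backpatch" stand-in: make the page accessible so the access retries.
	return mprotect(emulated_base, emulated_size, PROT_READ | PROT_WRITE) == 0;
}

static void sigsegv_handler(int sig, siginfo_t* info, void* raw_context)
{
	if (!HandleFault((uintptr_t)info->si_addr, raw_context))
	{
		// retry and crash, like the handlers in this commit
		signal(sig, SIG_DFL);
	}
}

int main()
{
	emulated_base = (uint8_t*)mmap(nullptr, emulated_size, PROT_NONE,
	                               MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	if (emulated_base == (uint8_t*)MAP_FAILED)
		return 1;

	struct sigaction sa = {};
	sa.sa_sigaction = sigsegv_handler;
	sa.sa_flags = SA_SIGINFO;
	sigaction(SIGSEGV, &sa, nullptr);

	emulated_base[0] = 42;                   // faults once, then succeeds
	printf("recovered value: %d\n", emulated_base[0]);
	return 0;
}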

Because SContext is now exposed through JitInterface, I wanted JitBackpatch.h
(which defines it) to stay lightweight, so I moved TrampolineCache, along with
its x64{Analyzer,Emitter} dependencies, into its own file.  I hate adding new
files in three places, two of which are MSVC...

While I'm at it, edit a misleading comment.
comex 2014-09-11 00:24:22 -04:00
parent 7b0fdb52cd
commit 755bd2c445
15 changed files with 258 additions and 261 deletions

View File

@@ -32,9 +32,9 @@ typedef struct ucontext {
 } ucontext_t;
 #endif
-void sigsegv_handler(int signal, siginfo_t *info, void *raw_context)
+static void sigsegv_handler(int sig, siginfo_t *info, void *raw_context)
 {
-	if (signal != SIGSEGV)
+	if (sig != SIGSEGV)
 	{
 		// We are not interested in other signals - handle it as usual.
 		return;
@@ -47,33 +47,18 @@ void sigsegv_handler(int signal, siginfo_t *info, void *raw_context)
 		return;
 	}
 	// Get all the information we can out of the context.
 	mcontext_t *ctx = &context->uc_mcontext;
-	void *fault_memory_ptr = (void*)ctx->arm_r10;
-	u8 *fault_instruction_ptr = (u8 *)ctx->arm_pc;
+	// comex says hello, and is most curious whether this is arm_r10 for a
+	// reason as opposed to si_addr like the x64MemTools.cpp version. Is there
+	// even a need for this file to be architecture specific?
+	uintptr_t fault_memory_ptr = (uintptr_t)ctx->arm_r10;
-	if (!JitInterface::IsInCodeSpace(fault_instruction_ptr))
+	if (!JitInterface::HandleFault(fault_memory_ptr, ctx))
 	{
-		// Let's not prevent debugging.
-		return;
-	}
-	u64 bad_address = (u64)fault_memory_ptr;
-	u64 memspace_bottom = (u64)Memory::base;
-	if (bad_address < memspace_bottom)
-	{
-		PanicAlertT("Exception handler - access below memory space. %08llx%08llx",
-			bad_address >> 32, bad_address);
-	}
-	u32 em_address = (u32)(bad_address - memspace_bottom);
-	const u8 *new_rip = jit->BackPatch(fault_instruction_ptr, em_address, ctx);
-	if (new_rip)
-	{
-		ctx->arm_pc = (u32) new_rip;
+		// retry and crash
+		signal(SIGSEGV, SIG_DFL);
 	}
 }

View File

@@ -195,9 +195,10 @@ if(_M_X86)
 	PowerPC/Jit64/Jit_Paired.cpp
 	PowerPC/Jit64/JitRegCache.cpp
 	PowerPC/Jit64/Jit_SystemRegisters.cpp
-	PowerPC/JitCommon/JitBackpatch.cpp
 	PowerPC/JitCommon/JitAsmCommon.cpp
-	PowerPC/JitCommon/Jit_Util.cpp)
+	PowerPC/JitCommon/JitBackpatch.cpp
+	PowerPC/JitCommon/Jit_Util.cpp
+	PowerPC/JitCommon/TrampolineCache.cpp)
 elseif(_M_ARM_32)
 	set(SRCS ${SRCS}
 		ArmMemTools.cpp

View File

@@ -229,6 +229,7 @@
     <ClCompile Include="PowerPC\JitCommon\JitBase.cpp" />
     <ClCompile Include="PowerPC\JitCommon\JitCache.cpp" />
     <ClCompile Include="PowerPC\JitCommon\Jit_Util.cpp" />
+    <ClCompile Include="PowerPC\JitCommon\TrampolineCache.cpp" />
     <ClCompile Include="PowerPC\JitInterface.cpp" />
     <ClCompile Include="PowerPC\PowerPC.cpp" />
     <ClCompile Include="PowerPC\PPCAnalyst.cpp" />
@@ -406,6 +407,7 @@
    <ClInclude Include="PowerPC\JitCommon\JitBase.h" />
    <ClInclude Include="PowerPC\JitCommon\JitCache.h" />
    <ClInclude Include="PowerPC\JitCommon\Jit_Util.h" />
+    <ClInclude Include="PowerPC\JitCommon\TrampolineCache.h" />
    <ClInclude Include="PowerPC\JitInterface.h" />
    <ClInclude Include="PowerPC\PowerPC.h" />
    <ClInclude Include="PowerPC\PPCAnalyst.h" />

View File

@@ -640,6 +640,9 @@
     <ClCompile Include="PowerPC\JitCommon\JitCache.cpp">
       <Filter>PowerPC\JitCommon</Filter>
     </ClCompile>
+    <ClCompile Include="PowerPC\JitCommon\TrampolineCache.cpp">
+      <Filter>PowerPC\JitCommon</Filter>
+    </ClCompile>
     <ClCompile Include="PowerPC\Jit64IL\IR_X86.cpp">
       <Filter>PowerPC\JitIL</Filter>
     </ClCompile>
@@ -1182,6 +1185,9 @@
     <ClInclude Include="PowerPC\JitCommon\JitCache.h">
       <Filter>PowerPC\JitCommon</Filter>
     </ClInclude>
+    <ClInclude Include="PowerPC\JitCommon\TrampolineCache.h">
+      <Filter>PowerPC\JitCommon</Filter>
+    </ClInclude>
     <ClInclude Include="PowerPC\Jit64IL\JitIL.h">
       <Filter>PowerPC\JitIL</Filter>
     </ClInclude>

View File

@@ -56,6 +56,10 @@ public:
 	void Trace();
+	JitBlockCache *GetBlockCache() override { return &blocks; }
+	bool HandleFault(uintptr_t access_address, SContext* ctx) override { return false; }
 	void ClearCache() override;
 	const u8 *GetDispatcher()
 	{
@@ -105,4 +109,5 @@ public:
 	void DynaRunTable31(UGeckoInstruction _inst) override;
 	void DynaRunTable59(UGeckoInstruction _inst) override;
 	void DynaRunTable63(UGeckoInstruction _inst) override;
 };

View File

@@ -58,6 +58,8 @@ private:
 	void SetFPException(ArmGen::ARMReg Reg, u32 Exception);
 	ArmGen::FixupBranch JumpIfCRFieldBit(int field, int bit, bool jump_if_set);
+	bool BackPatch(SContext* ctx);
 public:
 	JitArm() : code_buffer(32000) {}
 	~JitArm() {}
@@ -72,9 +74,7 @@ public:
 	JitBaseBlockCache *GetBlockCache() { return &blocks; }
-	const u8 *BackPatch(u8 *codePtr, u32 em_address, void *ctx);
-	bool IsInCodeSpace(u8 *ptr) { return IsInSpace(ptr); }
+	bool HandleFault(uintptr_t access_address, SContext* ctx) override;
 	void Trace();

View File

@@ -66,12 +66,23 @@ bool DisamLoadStore(const u32 inst, ARMReg &rD, u8 &accessSize, bool &Store)
 	}
 	return true;
 }
-const u8 *JitArm::BackPatch(u8 *codePtr, u32, void *ctx_void)
+bool JitArm::HandleFault(uintptr_t access_address, SContext* ctx)
+{
+	if (access_address < (uintptr_t)Memory::base)
+	{
+		PanicAlertT("Exception handler - access below memory space. %08llx%08llx",
+			access_address >> 32, access_address);
+	}
+	return BackPatch(ctx);
+}
+bool JitArm::BackPatch(SContext* ctx)
 {
 	// TODO: This ctx needs to be filled with our information
-	SContext *ctx = (SContext *)ctx_void;
 	// We need to get the destination register before we start
+	u8* codePtr = (u8*)ctx->CTX_PC;
 	u32 Value = *(u32*)codePtr;
 	ARMReg rD;
 	u8 accessSize;
@@ -109,7 +120,7 @@ const u8 *JitArm::BackPatch(u8 *codePtr, u32, void *ctx_void)
 		u32 newPC = ctx->CTX_PC - (ARMREGOFFSET + 4 * 4);
 		ctx->CTX_PC = newPC;
 		emitter.FlushIcache();
-		return (u8*)ctx->CTX_PC;
+		return true;
 	}
 	else
 	{
@@ -135,7 +146,7 @@ const u8 *JitArm::BackPatch(u8 *codePtr, u32, void *ctx_void)
 		emitter.MOV(rD, R14); // 8
 		ctx->CTX_PC -= ARMREGOFFSET + (4 * 4);
 		emitter.FlushIcache();
-		return (u8*)ctx->CTX_PC;
+		return true;
 	}
 	return 0;
 }

View File

@@ -3,24 +3,14 @@
 // Refer to the license.txt file included.
 #include <cinttypes>
-#include <string>
 #include "disasm.h"
-#include "Common/CommonTypes.h"
-#include "Common/StringUtil.h"
 #include "Core/PowerPC/JitCommon/JitBackpatch.h"
 #include "Core/PowerPC/JitCommon/JitBase.h"
-#ifdef _WIN32
-#include <windows.h>
-#endif
 using namespace Gen;
-extern u8 *trampolineCodePtr;
 static void BackPatchError(const std::string &text, u8 *codePtr, u32 emAddress)
 {
 	u64 code_addr = (u64)codePtr;
@@ -35,176 +25,51 @@ static void BackPatchError(const std::string &text, u8 *codePtr, u32 emAddress)
 	return;
 }
-void TrampolineCache::Init()
+// This generates some fairly heavy trampolines, but it doesn't really hurt.
+// Only instructions that access I/O will get these, and there won't be that
+// many of them in a typical program/game.
+bool Jitx86Base::HandleFault(uintptr_t access_address, SContext* ctx)
 {
-	AllocCodeSpace(4 * 1024 * 1024);
+	// TODO: do we properly handle off-the-end?
+	if (access_address >= (uintptr_t)Memory::base && access_address < (uintptr_t)Memory::base + 0x100010000)
+		return BackPatch((u32)(access_address - (uintptr_t)Memory::base), ctx);
+	return false;
 }
-void TrampolineCache::Shutdown()
+bool Jitx86Base::BackPatch(u32 emAddress, SContext* ctx)
 {
-	FreeCodeSpace();
-}
-// Extremely simplistic - just generate the requested trampoline. May reuse them in the future.
-const u8 *TrampolineCache::GetReadTrampoline(const InstructionInfo &info, u32 registersInUse)
-{
-	if (GetSpaceLeft() < 1024)
-		PanicAlert("Trampoline cache full");
-	const u8 *trampoline = GetCodePtr();
-	X64Reg addrReg = (X64Reg)info.scaledReg;
-	X64Reg dataReg = (X64Reg)info.regOperandReg;
-	// It's a read. Easy.
-	// RSP alignment here is 8 due to the call.
-	ABI_PushRegistersAndAdjustStack(registersInUse, 8);
-	if (addrReg != ABI_PARAM1)
-		MOV(32, R(ABI_PARAM1), R((X64Reg)addrReg));
-	if (info.displacement)
-		ADD(32, R(ABI_PARAM1), Imm32(info.displacement));
-	switch (info.operandSize)
-	{
-	case 4:
-		CALL((void *)&Memory::Read_U32);
-		break;
-	case 2:
-		CALL((void *)&Memory::Read_U16);
-		SHL(32, R(ABI_RETURN), Imm8(16));
-		break;
-	case 1:
-		CALL((void *)&Memory::Read_U8);
-		break;
-	}
-	if (info.signExtend && info.operandSize == 1)
-	{
-		// Need to sign extend value from Read_U8.
-		MOVSX(32, 8, dataReg, R(ABI_RETURN));
-	}
-	else if (dataReg != EAX)
-	{
-		MOV(32, R(dataReg), R(ABI_RETURN));
-	}
-	ABI_PopRegistersAndAdjustStack(registersInUse, 8);
-	RET();
-	return trampoline;
-}
-// Extremely simplistic - just generate the requested trampoline. May reuse them in the future.
-const u8 *TrampolineCache::GetWriteTrampoline(const InstructionInfo &info, u32 registersInUse, u32 pc)
-{
-	if (GetSpaceLeft() < 1024)
-		PanicAlert("Trampoline cache full");
-	const u8 *trampoline = GetCodePtr();
-	X64Reg dataReg = (X64Reg)info.regOperandReg;
-	X64Reg addrReg = (X64Reg)info.scaledReg;
-	// It's a write. Yay. Remember that we don't have to be super efficient since it's "just" a
-	// hardware access - we can take shortcuts.
-	// Don't treat FIFO writes specially for now because they require a burst
-	// check anyway.
-	// PC is used by memory watchpoints (if enabled) or to print accurate PC locations in debug logs
-	MOV(32, PPCSTATE(pc), Imm32(pc));
-	ABI_PushRegistersAndAdjustStack(registersInUse, 8);
-	if (info.hasImmediate)
-	{
-		if (addrReg != ABI_PARAM2)
-			MOV(64, R(ABI_PARAM2), R(addrReg));
-		// we have to swap back the immediate to pass it to the write functions
-		switch (info.operandSize)
-		{
-		case 8:
-			PanicAlert("Invalid 64-bit immediate!");
-			break;
-		case 4:
-			MOV(32, R(ABI_PARAM1), Imm32(Common::swap32((u32)info.immediate)));
-			break;
-		case 2:
-			MOV(16, R(ABI_PARAM1), Imm16(Common::swap16((u16)info.immediate)));
-			break;
-		case 1:
-			MOV(8, R(ABI_PARAM1), Imm8((u8)info.immediate));
-			break;
-		}
-	}
-	else
-	{
-		MOVTwo(64, ABI_PARAM1, dataReg, ABI_PARAM2, addrReg);
-	}
-	if (info.displacement)
-	{
-		ADD(32, R(ABI_PARAM2), Imm32(info.displacement));
-	}
-	switch (info.operandSize)
-	{
-	case 8:
-		CALL((void *)&Memory::Write_U64);
-		break;
-	case 4:
-		CALL((void *)&Memory::Write_U32);
-		break;
-	case 2:
-		CALL((void *)&Memory::Write_U16);
-		break;
-	case 1:
-		CALL((void *)&Memory::Write_U8);
-		break;
-	}
-	ABI_PopRegistersAndAdjustStack(registersInUse, 8);
-	RET();
-	return trampoline;
-}
-// This generates some fairly heavy trampolines, but:
-// 1) It's really necessary. We don't know anything about the context.
-// 2) It doesn't really hurt. Only instructions that access I/O will get these, and there won't be
-// that many of them in a typical program/game.
-const u8 *Jitx86Base::BackPatch(u8 *codePtr, u32 emAddress, void *ctx_void)
-{
-	SContext *ctx = (SContext *)ctx_void;
-	if (!jit->IsInCodeSpace(codePtr))
-		return nullptr; // this will become a regular crash real soon after this
+	u8* codePtr = (u8*) ctx->CTX_PC;
+	if (!IsInSpace(codePtr))
+		return false; // this will become a regular crash real soon after this
 	InstructionInfo info = {};
 	if (!DisassembleMov(codePtr, &info))
 	{
 		BackPatchError("BackPatch - failed to disassemble MOV instruction", codePtr, emAddress);
-		return nullptr;
+		return false;
 	}
 	if (info.otherReg != RMEM)
 	{
 		PanicAlert("BackPatch : Base reg not RMEM."
 			"\n\nAttempted to access %08x.", emAddress);
-		return nullptr;
+		return false;
 	}
 	if (info.byteSwap && info.instructionSize < BACKPATCH_SIZE)
 	{
 		PanicAlert("BackPatch: MOVBE is too small");
-		return nullptr;
+		return false;
 	}
 	auto it = registersInUseAtLoc.find(codePtr);
 	if (it == registersInUseAtLoc.end())
 	{
 		PanicAlert("BackPatch: no register use entry for address %p", codePtr);
-		return nullptr;
+		return false;
 	}
 	u32 registersInUse = it->second;
@@ -228,7 +93,7 @@ const u8 *Jitx86Base::BackPatch(u8 *codePtr, u32 emAddress, void *ctx_void)
 		{
 			emitter.NOP(padding);
 		}
-		return codePtr;
+		ctx->CTX_PC = (u64)codePtr;
 	}
 	else
 	{
@@ -281,6 +146,8 @@ const u8 *Jitx86Base::BackPatch(u8 *codePtr, u32 emAddress, void *ctx_void)
 		{
 			emitter.NOP(padding);
 		}
-		return start;
+		ctx->CTX_PC = (u64)start;
 	}
+	return true;
 }

View File

@@ -5,11 +5,6 @@
 #pragma once
 #include "Common/CommonTypes.h"
-#include "Common/x64Analyzer.h"
-#include "Common/x64Emitter.h"
-// We need at least this many bytes for backpatching.
-const int BACKPATCH_SIZE = 5;
 // meh.
 #if defined(_WIN32)
@@ -147,8 +142,8 @@ const int BACKPATCH_SIZE = 5;
 #endif
 #if _M_X86_64
-#define CTX_PC CTX_RIP
 #include <stddef.h>
+#define CTX_PC CTX_RIP
 static inline u64 *ContextRN(SContext* ctx, int n)
 {
 	static const u8 offsets[] =
@@ -173,13 +168,3 @@ static inline u64 *ContextRN(SContext* ctx, int n)
 	return (u64 *) ((char *) ctx + offsets[n]);
 }
 #endif
-class TrampolineCache : public Gen::X64CodeBlock
-{
-public:
-	void Init();
-	void Shutdown();
-	const u8 *GetReadTrampoline(const InstructionInfo &info, u32 registersInUse);
-	const u8 *GetWriteTrampoline(const InstructionInfo &info, u32 registersInUse, u32 pc);
-};

View File

@@ -26,6 +26,7 @@
 #include "Core/PowerPC/JitCommon/JitAsmCommon.h"
 #include "Core/PowerPC/JitCommon/JitBackpatch.h"
 #include "Core/PowerPC/JitCommon/JitCache.h"
+#include "Core/PowerPC/JitCommon/TrampolineCache.h"
 // TODO: find a better place for x86-specific stuff
 // The following register assignments are common to Jit64 and Jit64IL:
@@ -110,24 +111,20 @@ public:
 	virtual void Jit(u32 em_address) = 0;
-	virtual const u8 *BackPatch(u8 *codePtr, u32 em_address, void *ctx) = 0;
 	virtual const CommonAsmRoutinesBase *GetAsmRoutines() = 0;
-	virtual bool IsInCodeSpace(u8 *ptr) = 0;
+	virtual bool HandleFault(uintptr_t access_address, SContext* ctx) = 0;
 };
 class Jitx86Base : public JitBase, public EmuCodeBlock
 {
 protected:
+	bool BackPatch(u32 emAddress, SContext* ctx);
 	JitBlockCache blocks;
 	TrampolineCache trampolines;
 public:
 	JitBlockCache *GetBlockCache() override { return &blocks; }
-	const u8 *BackPatch(u8 *codePtr, u32 em_address, void *ctx) override;
-	bool IsInCodeSpace(u8 *ptr) override { return IsInSpace(ptr); }
+	bool HandleFault(uintptr_t access_address, SContext* ctx) override;
 };
 extern JitBase *jit;

View File

@@ -0,0 +1,156 @@
// Copyright 2013 Dolphin Emulator Project
// Licensed under GPLv2
// Refer to the license.txt file included.
#include <cinttypes>
#include <string>
#include "Common/CommonTypes.h"
#include "Common/StringUtil.h"
#include "Common/x64ABI.h"
#include "Core/HW/Memmap.h"
#include "Core/PowerPC/JitCommon/JitBase.h"
#include "Core/PowerPC/JitCommon/TrampolineCache.h"
#ifdef _WIN32
#include <windows.h>
#endif
using namespace Gen;
extern u8 *trampolineCodePtr;
void TrampolineCache::Init()
{
AllocCodeSpace(4 * 1024 * 1024);
}
void TrampolineCache::Shutdown()
{
FreeCodeSpace();
}
// Extremely simplistic - just generate the requested trampoline. May reuse them in the future.
const u8 *TrampolineCache::GetReadTrampoline(const InstructionInfo &info, u32 registersInUse)
{
if (GetSpaceLeft() < 1024)
PanicAlert("Trampoline cache full");
const u8 *trampoline = GetCodePtr();
X64Reg addrReg = (X64Reg)info.scaledReg;
X64Reg dataReg = (X64Reg)info.regOperandReg;
// It's a read. Easy.
// RSP alignment here is 8 due to the call.
ABI_PushRegistersAndAdjustStack(registersInUse, 8);
if (addrReg != ABI_PARAM1)
MOV(32, R(ABI_PARAM1), R((X64Reg)addrReg));
if (info.displacement)
ADD(32, R(ABI_PARAM1), Imm32(info.displacement));
switch (info.operandSize)
{
case 4:
CALL((void *)&Memory::Read_U32);
break;
case 2:
CALL((void *)&Memory::Read_U16);
SHL(32, R(ABI_RETURN), Imm8(16));
break;
case 1:
CALL((void *)&Memory::Read_U8);
break;
}
if (info.signExtend && info.operandSize == 1)
{
// Need to sign extend value from Read_U8.
MOVSX(32, 8, dataReg, R(ABI_RETURN));
}
else if (dataReg != EAX)
{
MOV(32, R(dataReg), R(ABI_RETURN));
}
ABI_PopRegistersAndAdjustStack(registersInUse, 8);
RET();
return trampoline;
}
// Extremely simplistic - just generate the requested trampoline. May reuse them in the future.
const u8 *TrampolineCache::GetWriteTrampoline(const InstructionInfo &info, u32 registersInUse, u32 pc)
{
if (GetSpaceLeft() < 1024)
PanicAlert("Trampoline cache full");
const u8 *trampoline = GetCodePtr();
X64Reg dataReg = (X64Reg)info.regOperandReg;
X64Reg addrReg = (X64Reg)info.scaledReg;
// It's a write. Yay. Remember that we don't have to be super efficient since it's "just" a
// hardware access - we can take shortcuts.
// Don't treat FIFO writes specially for now because they require a burst
// check anyway.
// PC is used by memory watchpoints (if enabled) or to print accurate PC locations in debug logs
MOV(32, PPCSTATE(pc), Imm32(pc));
ABI_PushRegistersAndAdjustStack(registersInUse, 8);
if (info.hasImmediate)
{
if (addrReg != ABI_PARAM2)
MOV(64, R(ABI_PARAM2), R(addrReg));
// we have to swap back the immediate to pass it to the write functions
switch (info.operandSize)
{
case 8:
PanicAlert("Invalid 64-bit immediate!");
break;
case 4:
MOV(32, R(ABI_PARAM1), Imm32(Common::swap32((u32)info.immediate)));
break;
case 2:
MOV(16, R(ABI_PARAM1), Imm16(Common::swap16((u16)info.immediate)));
break;
case 1:
MOV(8, R(ABI_PARAM1), Imm8((u8)info.immediate));
break;
}
}
else
{
MOVTwo(64, ABI_PARAM1, dataReg, ABI_PARAM2, addrReg);
}
if (info.displacement)
{
ADD(32, R(ABI_PARAM2), Imm32(info.displacement));
}
switch (info.operandSize)
{
case 8:
CALL((void *)&Memory::Write_U64);
break;
case 4:
CALL((void *)&Memory::Write_U32);
break;
case 2:
CALL((void *)&Memory::Write_U16);
break;
case 1:
CALL((void *)&Memory::Write_U8);
break;
}
ABI_PopRegistersAndAdjustStack(registersInUse, 8);
RET();
return trampoline;
}

View File

@@ -0,0 +1,22 @@
// Copyright 2013 Dolphin Emulator Project
// Licensed under GPLv2
// Refer to the license.txt file included.
#pragma once
#include "Common/CommonTypes.h"
#include "Common/x64Analyzer.h"
#include "Common/x64Emitter.h"
// We need at least this many bytes for backpatching.
const int BACKPATCH_SIZE = 5;
class TrampolineCache : public Gen::X64CodeBlock
{
public:
void Init();
void Shutdown();
const u8 *GetReadTrampoline(const InstructionInfo &info, u32 registersInUse);
const u8 *GetWriteTrampoline(const InstructionInfo &info, u32 registersInUse, u32 pc);
};

View File

@@ -190,13 +190,9 @@ namespace JitInterface
 	}
 #endif
 }
-bool IsInCodeSpace(u8 *ptr)
+bool HandleFault(uintptr_t access_address, SContext* ctx)
 {
-	return jit->IsInCodeSpace(ptr);
-}
-const u8 *BackPatch(u8 *codePtr, u32 em_address, void *ctx)
-{
-	return jit->BackPatch(codePtr, em_address, ctx);
+	return jit->HandleFault(access_address, ctx);
 }
 void ClearCache()

View File

@@ -7,6 +7,7 @@
 #include <string>
 #include "Common/ChunkFile.h"
 #include "Core/PowerPC/CPUCoreBase.h"
+#include "Core/PowerPC/JitCommon/JitBackpatch.h"
 namespace JitInterface
 {
@@ -20,8 +21,7 @@ namespace JitInterface
 	void WriteProfileResults(const std::string& filename);
 	// Memory Utilities
-	bool IsInCodeSpace(u8 *ptr);
-	const u8 *BackPatch(u8 *codePtr, u32 em_address, void *ctx);
+	bool HandleFault(uintptr_t access_address, SContext* ctx);
 	// used by JIT to read instructions
 	u32 Read_Opcode_JIT(const u32 _Address);

View File

@@ -23,42 +23,6 @@
 namespace EMM
 {
-static bool DoFault(u64 bad_address, SContext *ctx)
-{
-	if (!JitInterface::IsInCodeSpace((u8*) ctx->CTX_PC))
-	{
-		// Let's not prevent debugging.
-		return false;
-	}
-	u64 memspace_bottom = (u64)Memory::base;
-	u64 memspace_top = memspace_bottom +
-#if _ARCH_64
-		0x100000000ULL;
-#else
-		0x40000000;
-#endif
-	if (bad_address < memspace_bottom || bad_address >= memspace_top)
-	{
-		return false;
-	}
-	u32 em_address = (u32)(bad_address - memspace_bottom);
-	const u8 *new_pc = jit->BackPatch((u8*) ctx->CTX_PC, em_address, ctx);
-	if (new_pc)
-	{
-		ctx->CTX_PC = (u64) new_pc;
-	}
-	else
-	{
-		// there was an error, give the debugger a chance
-		return false;
-	}
-	return true;
-}
 #ifdef _WIN32
 LONG NTAPI Handler(PEXCEPTION_POINTERS pPtrs)
@@ -74,10 +38,10 @@ LONG NTAPI Handler(PEXCEPTION_POINTERS pPtrs)
 	}
 	// virtual address of the inaccessible data
-	u64 badAddress = (u64)pPtrs->ExceptionRecord->ExceptionInformation[1];
+	uintptr_t badAddress = (uintptr_t)pPtrs->ExceptionRecord->ExceptionInformation[1];
 	CONTEXT *ctx = pPtrs->ContextRecord;
-	if (DoFault(badAddress, ctx))
+	if (JitInterface::HandleFault(badAddress, ctx))
 	{
 		return (DWORD)EXCEPTION_CONTINUE_EXECUTION;
 	}
@@ -198,7 +162,7 @@ void ExceptionThread(mach_port_t port)
 	x86_thread_state64_t *state = (x86_thread_state64_t *) msg_in.old_state;
-	bool ok = DoFault(msg_in.code[1], state);
+	bool ok = JitInterface::HandleFault((uintptr_t) msg_in.code[1], state);
 	// Set up the reply.
 	msg_out.Head.msgh_bits = MACH_MSGH_BITS(MACH_MSGH_BITS_REMOTE(msg_in.Head.msgh_bits), 0);
@@ -263,12 +227,12 @@ static void sigsegv_handler(int sig, siginfo_t *info, void *raw_context)
 		// Huh? Return.
 		return;
 	}
-	u64 bad_address = (u64)info->si_addr;
+	uintptr_t bad_address = (uintptr_t)info->si_addr;
 	// Get all the information we can out of the context.
 	mcontext_t *ctx = &context->uc_mcontext;
 	// assume it's not a write
-	if (!DoFault(bad_address, ctx))
+	if (!JitInterface::HandleFault(bad_address, ctx))
 	{
 		// retry and crash
 		signal(SIGSEGV, SIG_DFL);