diff --git a/Source/Core/Common/x64Emitter.cpp b/Source/Core/Common/x64Emitter.cpp
index 850a63b293..618ec40b18 100644
--- a/Source/Core/Common/x64Emitter.cpp
+++ b/Source/Core/Common/x64Emitter.cpp
@@ -890,6 +890,33 @@ void XEmitter::WriteMOVBE(int bits, u8 op, X64Reg reg, OpArg arg)
 void XEmitter::MOVBE(int bits, X64Reg dest, const OpArg& src) {WriteMOVBE(bits, 0xF0, dest, src);}
 void XEmitter::MOVBE(int bits, const OpArg& dest, X64Reg src) {WriteMOVBE(bits, 0xF1, src, dest);}
 
+void XEmitter::LoadAndSwap(int size, Gen::X64Reg dst, const Gen::OpArg& src)
+{
+	if (cpu_info.bMOVBE)
+	{
+		MOVBE(size, dst, src);
+	}
+	else
+	{
+		MOV(size, R(dst), src);
+		BSWAP(size, dst);
+	}
+}
+
+void XEmitter::SwapAndStore(int size, const Gen::OpArg& dst, Gen::X64Reg src)
+{
+	if (cpu_info.bMOVBE)
+	{
+		MOVBE(size, dst, src);
+	}
+	else
+	{
+		BSWAP(size, src);
+		MOV(size, dst, R(src));
+	}
+}
+
+
 void XEmitter::LEA(int bits, X64Reg dest, OpArg src)
 {
 	_assert_msg_(DYNA_REC, !src.IsImm(), "LEA - Imm argument");
diff --git a/Source/Core/Common/x64Emitter.h b/Source/Core/Common/x64Emitter.h
index 00536dca34..48a1eae3b1 100644
--- a/Source/Core/Common/x64Emitter.h
+++ b/Source/Core/Common/x64Emitter.h
@@ -479,6 +479,8 @@ public:
 	// Available only on Atom or >= Haswell so far. Test with cpu_info.bMOVBE.
 	void MOVBE(int bits, X64Reg dest, const OpArg& src);
 	void MOVBE(int bits, const OpArg& dest, X64Reg src);
+	void LoadAndSwap(int size, Gen::X64Reg dst, const Gen::OpArg& src);
+	void SwapAndStore(int size, const Gen::OpArg& dst, Gen::X64Reg src);
 
 	// Available only on AMD >= Phenom or Intel >= Haswell
 	void LZCNT(int bits, X64Reg dest, OpArg src);
diff --git a/Source/Core/Core/PowerPC/JitCommon/Jit_Util.cpp b/Source/Core/Core/PowerPC/JitCommon/Jit_Util.cpp
index a0c2e038cc..b8f2809fec 100644
--- a/Source/Core/Core/PowerPC/JitCommon/Jit_Util.cpp
+++ b/Source/Core/Core/PowerPC/JitCommon/Jit_Util.cpp
@@ -23,32 +23,6 @@ void EmuCodeBlock::MemoryExceptionCheck()
 	}
 }
 
-void EmuCodeBlock::LoadAndSwap(int size, Gen::X64Reg dst, const Gen::OpArg& src)
-{
-	if (cpu_info.bMOVBE)
-	{
-		MOVBE(size, dst, src);
-	}
-	else
-	{
-		MOV(size, R(dst), src);
-		BSWAP(size, dst);
-	}
-}
-
-void EmuCodeBlock::SwapAndStore(int size, const Gen::OpArg& dst, Gen::X64Reg src)
-{
-	if (cpu_info.bMOVBE)
-	{
-		MOVBE(size, dst, src);
-	}
-	else
-	{
-		BSWAP(size, src);
-		MOV(size, dst, R(src));
-	}
-}
-
 void EmuCodeBlock::UnsafeLoadRegToReg(X64Reg reg_addr, X64Reg reg_value, int accessSize, s32 offset, bool signExtend)
 {
 	MOVZX(32, accessSize, reg_value, MComplex(RMEM, reg_addr, SCALE_1, offset));
diff --git a/Source/Core/Core/PowerPC/JitCommon/Jit_Util.h b/Source/Core/Core/PowerPC/JitCommon/Jit_Util.h
index 9c11937b47..c3175633ba 100644
--- a/Source/Core/Core/PowerPC/JitCommon/Jit_Util.h
+++ b/Source/Core/Core/PowerPC/JitCommon/Jit_Util.h
@@ -68,9 +68,6 @@ public:
 		SetCodePtr(nearcode);
 	}
 
-	void LoadAndSwap(int size, Gen::X64Reg dst, const Gen::OpArg& src);
-	void SwapAndStore(int size, const Gen::OpArg& dst, Gen::X64Reg src);
-
 	Gen::FixupBranch CheckIfSafeAddress(Gen::OpArg reg_value, Gen::X64Reg reg_addr, BitSet32 registers_in_use, u32 mem_mask);
 	void UnsafeLoadRegToReg(Gen::X64Reg reg_addr, Gen::X64Reg reg_value, int accessSize, s32 offset = 0, bool signExtend = false);
 	void UnsafeLoadRegToRegNoSwap(Gen::X64Reg reg_addr, Gen::X64Reg reg_value, int accessSize, s32 offset, bool signExtend = false);
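
Usage sketch (not part of the diff above): with LoadAndSwap/SwapAndStore moved from EmuCodeBlock into XEmitter, any emitter-derived code block can emit an endian-swapped load or store and let the helper pick MOVBE versus MOV+BSWAP based on cpu_info.bMOVBE. The caller below is hypothetical; the function name, register choices, and exact include path are assumptions for illustration, not code introduced by this change.

// Minimal caller sketch, assuming a Dolphin tree with this diff applied.
#include "Common/x64Emitter.h"  // assumed include path for XEmitter and Gen:: helpers

using namespace Gen;

// Hypothetical helper: emit a big-endian 32-bit read-modify-write through [RDX].
static void EmitBigEndianAccess32(XEmitter& emit)
{
	// Read the 32-bit big-endian value at [RDX] into RAX in host (little-endian)
	// order. Expands to MOVBE when cpu_info.bMOVBE is set, else MOV then BSWAP.
	emit.LoadAndSwap(32, RAX, MatR(RDX));

	// Write RAX back to [RDX] in big-endian order. On the non-MOVBE fallback the
	// helper BSWAPs the source register in place, so RAX is clobbered afterwards.
	emit.SwapAndStore(32, MatR(RDX), RAX);
}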