diff --git a/src/alloy/backend/x64/x64_emitter.cc b/src/alloy/backend/x64/x64_emitter.cc
index ace7964a6..6daba0195 100644
--- a/src/alloy/backend/x64/x64_emitter.cc
+++ b/src/alloy/backend/x64/x64_emitter.cc
@@ -414,6 +414,20 @@ void X64Emitter::CallNative(uint64_t(*fn)(void* raw_context, uint64_t arg0), uint64_t arg0) {
   ReloadEDX();
 }
 
+void X64Emitter::CallNativeSafe(void* fn) {
+  // rcx = context
+  // rdx = target host function
+  // r8 = arg0
+  // r9 = arg1
+  mov(rdx, reinterpret_cast<uint64_t>(fn));
+  auto thunk = backend()->guest_to_host_thunk();
+  mov(rax, reinterpret_cast<uint64_t>(thunk));
+  call(rax);
+  ReloadECX();
+  ReloadEDX();
+  // rax = host return
+}
+
 void X64Emitter::SetReturnAddress(uint64_t value) {
   mov(qword[rsp + StackLayout::GUEST_CALL_RET_ADDR], value);
 }
diff --git a/src/alloy/backend/x64/x64_emitter.h b/src/alloy/backend/x64/x64_emitter.h
index 2a56411c8..12c8c0310 100644
--- a/src/alloy/backend/x64/x64_emitter.h
+++ b/src/alloy/backend/x64/x64_emitter.h
@@ -127,6 +127,7 @@ public:
   void CallNative(uint64_t(*fn)(void* raw_context));
   void CallNative(uint64_t(*fn)(void* raw_context, uint64_t arg0));
   void CallNative(uint64_t(*fn)(void* raw_context, uint64_t arg0), uint64_t arg0);
+  void CallNativeSafe(void* fn);
   void SetReturnAddress(uint64_t value);
   void ReloadECX();
   void ReloadEDX();
diff --git a/src/alloy/backend/x64/x64_sequences.cc b/src/alloy/backend/x64/x64_sequences.cc
index 4f7f55a18..f7fbf6997 100644
--- a/src/alloy/backend/x64/x64_sequences.cc
+++ b/src/alloy/backend/x64/x64_sequences.cc
@@ -3489,7 +3489,7 @@ EMITTER(POW2_F32, MATCH(I<OPCODE_POW2, F32<>, F32<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulatePow2);
+    e.CallNativeSafe(EmulatePow2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3501,7 +3501,7 @@ EMITTER(POW2_F64, MATCH(I<OPCODE_POW2, F64<>, F64<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulatePow2);
+    e.CallNativeSafe(EmulatePow2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3515,7 +3515,7 @@ EMITTER(POW2_V128, MATCH(I<OPCODE_POW2, V128<>, V128<>>)) {
   }
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulatePow2);
+    e.CallNativeSafe(EmulatePow2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3540,7 +3540,7 @@ EMITTER(LOG2_F32, MATCH(I<OPCODE_LOG2, F32<>, F32<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulateLog2);
+    e.CallNativeSafe(EmulateLog2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3552,7 +3552,7 @@ EMITTER(LOG2_F64, MATCH(I<OPCODE_LOG2, F64<>, F64<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulateLog2);
+    e.CallNativeSafe(EmulateLog2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3565,9 +3565,8 @@ EMITTER(LOG2_V128, MATCH(I<OPCODE_LOG2, V128<>, V128<>>)) {
     return result;
   }
   static void Emit(X64Emitter& e, const EmitArgType& i) {
-    XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulateLog2);
+    e.CallNativeSafe(EmulateLog2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
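
Note: CallNativeSafe routes the call through the backend's guest_to_host_thunk rather than jumping to the host function directly, so the thunk can bridge from JIT'd guest code to the host calling convention. The register comments spell out the contract: rcx carries the context, rdx the target host function, and r8/r9 the first two arguments (the four Win64 integer argument registers). The indirect call via rax is needed because a 64-bit absolute address cannot be the target of a direct call instruction. As a rough C-level illustration only, a thunk compatible with that contract could look like the sketch below; the names (GuestToHostThunk, HostFn) are invented for illustration, and the real thunk is runtime-emitted machine code that can additionally save and restore register state around the call.

    #include <cstdint>

    // Hypothetical host-function shape matching CallNativeSafe's register
    // contract: rcx = context, r8 = arg0, r9 = arg1 under the Win64 ABI.
    using HostFn = uint64_t (*)(void* raw_context, uint64_t arg0,
                                uint64_t arg1);

    // Illustrative stand-in for the runtime-generated guest_to_host_thunk:
    // the target arrives in rdx (the second Win64 argument register) and is
    // forwarded the context and arguments; the result comes back in rax.
    // The actual emitted thunk would also preserve any state the JIT keeps
    // live across the call.
    extern "C" uint64_t GuestToHostThunk(void* raw_context, void* target,
                                         uint64_t arg0, uint64_t arg1) {
      return reinterpret_cast<HostFn>(target)(raw_context, arg0, arg1);
    }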