Preventing emulated instructions from stomping the xmm registers.
commit 574a04a853
parent 3845437276
@@ -414,6 +414,20 @@ void X64Emitter::CallNative(uint64_t(*fn)(void* raw_context, uint64_t arg0), uin
   ReloadEDX();
 }
 
+void X64Emitter::CallNativeSafe(void* fn) {
+  // rcx = context
+  // rdx = target host function
+  // r8 = arg0
+  // r9 = arg1
+  mov(rdx, reinterpret_cast<uint64_t>(fn));
+  auto thunk = backend()->guest_to_host_thunk();
+  mov(rax, reinterpret_cast<uint64_t>(thunk));
+  call(rax);
+  ReloadECX();
+  ReloadEDX();
+  // rax = host return
+}
+
 void X64Emitter::SetReturnAddress(uint64_t value) {
   mov(qword[rsp + StackLayout::GUEST_CALL_RET_ADDR], value);
 }
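CallNativeSafe itself only loads the target into rdx and calls through the backend's guest_to_host_thunk; it is the thunk that shields the guest's live xmm values from the host call. As a rough illustration, a thunk built with the same Xbyak assembler the emitter is based on could look like the sketch below. The preserved register set (the Win64 volatile xmm1-xmm5, leaving xmm0 free for the host return value), the argument shuffle, and the stack layout are all assumptions here; the real thunk lives in the backend and is not shown in this diff.

```cpp
// Sketch only: a guest-to-host thunk built with Xbyak. Assumptions: it
// preserves the Win64 volatile xmm1-xmm5 (xmm0 is left untouched so the
// host return value survives) and shuffles (rdx=target, r8=arg0, r9=arg1)
// into the host calling-convention slots.
#include <cstdint>
#include <xbyak/xbyak.h>

class GuestToHostThunkSketch : public Xbyak::CodeGenerator {
 public:
  GuestToHostThunkSketch() {
    // On entry, as set up by CallNativeSafe:
    //   rcx = context, rdx = host function, r8 = arg0, r9 = arg1.
    const uint32_t stack_size = 0x20 /* shadow */ + 5 * 16 /* xmm */ + 8 /* align */;
    sub(rsp, stack_size);
    // Spill the xmm registers a host callee is allowed to clobber.
    for (int n = 1; n <= 5; ++n) {
      movaps(ptr[rsp + 0x20 + (n - 1) * 16], Xbyak::Xmm(n));
    }
    mov(rax, rdx);  // target host function
    mov(rdx, r8);   // arg0 -> 2nd parameter
    mov(r8, r9);    // arg1 -> 3rd parameter
    call(rax);      // rcx (context) passes through as the 1st parameter
    // Restore everything except xmm0, which now holds the host result that
    // the emitted code reads back with vmovaps(dest, xmm0).
    for (int n = 1; n <= 5; ++n) {
      movaps(Xbyak::Xmm(n), ptr[rsp + 0x20 + (n - 1) * 16]);
    }
    add(rsp, stack_size);
    ret();
  }
};
```

Keeping the shuffle and the spills in a single shared thunk keeps CallNativeSafe itself to a couple of movs and a call, so every emulated-instruction call site pays the same small fixed cost.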
@@ -127,6 +127,7 @@ public:
   void CallNative(uint64_t(*fn)(void* raw_context));
   void CallNative(uint64_t(*fn)(void* raw_context, uint64_t arg0));
   void CallNative(uint64_t(*fn)(void* raw_context, uint64_t arg0), uint64_t arg0);
+  void CallNativeSafe(void* fn);
   void SetReturnAddress(uint64_t value);
   void ReloadECX();
   void ReloadEDX();
@@ -3489,7 +3489,7 @@ EMITTER(POW2_F32, MATCH(I<OPCODE_POW2, F32<>, F32<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulatePow2);
+    e.CallNativeSafe(EmulatePow2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3501,7 +3501,7 @@ EMITTER(POW2_F64, MATCH(I<OPCODE_POW2, F64<>, F64<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulatePow2);
+    e.CallNativeSafe(EmulatePow2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3515,7 +3515,7 @@ EMITTER(POW2_V128, MATCH(I<OPCODE_POW2, V128<>, V128<>>)) {
   }
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulatePow2);
+    e.CallNativeSafe(EmulatePow2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3540,7 +3540,7 @@ EMITTER(LOG2_F32, MATCH(I<OPCODE_LOG2, F32<>, F32<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulateLog2);
+    e.CallNativeSafe(EmulateLog2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3552,7 +3552,7 @@ EMITTER(LOG2_F64, MATCH(I<OPCODE_LOG2, F64<>, F64<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulateLog2);
+    e.CallNativeSafe(EmulateLog2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
@@ -3565,9 +3565,8 @@ EMITTER(LOG2_V128, MATCH(I<OPCODE_LOG2, V128<>, V128<>>)) {
     return result;
   }
   static void Emit(X64Emitter& e, const EmitArgType& i) {
-    XEASSERTALWAYS();
     e.lea(e.r8, e.StashXmm(i.src1));
-    e.CallNative(EmulateLog2);
+    e.CallNativeSafe(EmulateLog2);
     e.vmovaps(i.dest, e.xmm0);
   }
 };
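The EmulatePow2/EmulateLog2 helpers themselves are not part of this diff, so their exact signatures are an assumption; a host-side helper compatible with the plumbing above would take the context pointer first and the StashXmm pointer as arg0, and return a __m128 so the result comes back in xmm0 under the Microsoft x64 convention, which is what the emitted vmovaps(i.dest, e.xmm0) picks up. A hypothetical sketch:

```cpp
// Hypothetical host helper matching the call plumbing; the real
// EmulatePow2/EmulateLog2 signatures are not shown in this commit.
#include <cmath>
#include <xmmintrin.h>

__m128 EmulatePow2Sketch(void* raw_context, __m128* src) {
  (void)raw_context;  // context unused in this sketch
  // src points at the value the emitter stashed with StashXmm(i.src1).
  alignas(16) float values[4];
  _mm_store_ps(values, *src);
  for (float& v : values) {
    v = std::exp2(v);  // per-lane 2^x
  }
  // A __m128 return value travels back in xmm0 on Win64; the generated
  // code then copies it into the destination register with vmovaps.
  return _mm_load_ps(values);
}
```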