Fallback case if indirection table could not be allocated.
parent b9326be93c
commit f3fe260a17
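For context before the diff: the indirection table is a reserved host range starting at kIndirectionTableBase in which every guest address owns a 4-byte slot holding either the generated host code address or a resolve thunk. Below is a minimal sketch of that slot arithmetic, using the names from the diff; the constant value and the IndirectionSlot helper are illustrative placeholders, not code from this commit.

// Sketch only: how a guest address maps to its indirection slot.
// The base value below is a placeholder, not Xenia's actual constant.
#include <cassert>
#include <cstdint>

constexpr uint32_t kIndirectionTableBase = 0x80000000;

// Host pointer to the reserved table, or nullptr when the reservation
// failed -- the case this commit adds a fallback for.
uint8_t* indirection_table_base_ = nullptr;

// Hypothetical helper: slot = table base + (guest address - guest base),
// the same arithmetic used by AddIndirection and PlaceGuestCode below.
uint32_t* IndirectionSlot(uint32_t guest_address) {
  assert(indirection_table_base_ != nullptr);
  return reinterpret_cast<uint32_t*>(
      indirection_table_base_ + (guest_address - kIndirectionTableBase));
}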
@@ -52,7 +52,6 @@ bool X64CodeCache::Initialize() {
         "This is likely because the %.8X-%.8X range is in use by some other "
         "system DLL",
         kIndirectionTableBase, kIndirectionTableBase + kIndirectionTableSize);
-    return false;
   }
 
   // Create mmap file. This allows us to share the code cache with the debugger.
@@ -91,6 +90,8 @@ void X64CodeCache::set_indirection_default(uint32_t default_value) {
 
 void X64CodeCache::AddIndirection(uint32_t guest_address,
                                   uint32_t host_address) {
+  assert_not_null(indirection_table_base_);
+
   uint32_t* indirection_slot = reinterpret_cast<uint32_t*>(
       indirection_table_base_ + (guest_address - kIndirectionTableBase));
   *indirection_slot = host_address;
@@ -98,6 +99,10 @@ void X64CodeCache::AddIndirection(uint32_t guest_address,
 
 void X64CodeCache::CommitExecutableRange(uint32_t guest_low,
                                          uint32_t guest_high) {
+  if (!indirection_table_base_) {
+    return;
+  }
+
   // Commit the memory.
   xe::memory::AllocFixed(
       indirection_table_base_ + (guest_low - kIndirectionTableBase),
@@ -178,7 +183,7 @@ void* X64CodeCache::PlaceGuestCode(uint32_t guest_address, void* machine_code,
   // Now that everything is ready, fix up the indirection table.
   // Note that we do support code that doesn't have an indirection fixup, so
   // ignore those when we see them.
-  if (guest_address) {
+  if (guest_address && indirection_table_base_) {
     uint32_t* indirection_slot = reinterpret_cast<uint32_t*>(
         indirection_table_base_ + (guest_address - kIndirectionTableBase));
     *indirection_slot = uint32_t(reinterpret_cast<uint64_t>(code_address));
@@ -41,6 +41,7 @@ class X64CodeCache : public CodeCache {
   // TODO(benvanik): keep track of code blocks
   // TODO(benvanik): padding/guards/etc
 
+  bool has_indirection_table() { return indirection_table_base_ != nullptr; }
   void set_indirection_default(uint32_t default_value);
   void AddIndirection(uint32_t guest_address, uint32_t host_address);
 
@@ -366,12 +366,21 @@ void X64Emitter::Call(const hir::Instr* instr, GuestFunction* function) {
     // a ResolveFunction call, but makes the table less useful.
     assert_zero(uint64_t(fn->machine_code()) & 0xFFFFFFFF00000000);
     mov(eax, uint32_t(uint64_t(fn->machine_code())));
-  } else {
+  } else if (code_cache_->has_indirection_table()) {
     // Load the pointer to the indirection table maintained in X64CodeCache.
     // The target dword will either contain the address of the generated code
     // or a thunk to ResolveAddress.
     mov(ebx, function->address());
     mov(eax, dword[ebx]);
+  } else {
+    // Old-style resolve.
+    // Not too important because indirection table is almost always available.
+    // TODO: Overwrite the call-site with a straight call.
+    mov(rax, reinterpret_cast<uint64_t>(ResolveFunction));
+    mov(rdx, function->address());
+    call(rax);
+    ReloadECX();
+    ReloadEDX();
   }
 
   // Actually jump/call to rax.
@@ -403,10 +412,20 @@ void X64Emitter::CallIndirect(const hir::Instr* instr,
   // Load the pointer to the indirection table maintained in X64CodeCache.
   // The target dword will either contain the address of the generated code
   // or a thunk to ResolveAddress.
-  if (reg.cvt32() != ebx) {
-    mov(ebx, reg.cvt32());
+  if (code_cache_->has_indirection_table()) {
+    if (reg.cvt32() != ebx) {
+      mov(ebx, reg.cvt32());
+    }
+    mov(eax, dword[ebx]);
+  } else {
+    // Old-style resolve.
+    // Not too important because indirection table is almost always available.
+    mov(edx, reg.cvt32());
+    mov(rax, reinterpret_cast<uint64_t>(ResolveFunction));
+    call(rax);
+    ReloadECX();
+    ReloadEDX();
   }
-  mov(eax, dword[ebx]);
 
   // Actually jump/call to rax.
   if (instr->flags & hir::CALL_TAIL) {
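In plain C++, the dispatch decision the emitter changes above encode looks roughly like the sketch below. This is a summary, not code from the commit; ResolveTarget and SlowResolve are hypothetical names standing in for the emitted call-site code and the ResolveFunction path.

// Sketch of the fast path (indirection table present) vs. the old-style
// resolve fallback (table reservation failed). Illustrative names only.
#include <cstdint>

using HostCode = uint64_t;  // address of generated x64 code

// Stand-in for the ResolveFunction call the fallback branch emits.
HostCode SlowResolve(uint32_t guest_address) {
  // Would look up or JIT the function, then return its host address.
  return 0;
}

HostCode ResolveTarget(const uint8_t* indirection_table_base,
                       uint32_t table_guest_base, uint32_t guest_address) {
  if (indirection_table_base) {
    // Fast path: one load from the slot (mov eax, dword[ebx] above);
    // the slot holds either real code or a thunk to ResolveAddress.
    return *reinterpret_cast<const uint32_t*>(
        indirection_table_base + (guest_address - table_guest_base));
  }
  // Old-style resolve: no table was reserved, so resolve on every call.
  return SlowResolve(guest_address);
}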