#if defined(__SWITCH__)
#include <switch.h>
#include "frontend/switch/FaultHandler.h"
#elif defined(_WIN32)
#include <windows.h>
#else
#include <sys/mman.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <signal.h>
#endif

#if defined(__ANDROID__)
#include <dlfcn.h>
#include <linux/ashmem.h>
#include <sys/ioctl.h>
#endif

#include "ARMJIT_Memory.h"

#include "ARMJIT_Internal.h"
#include "ARMJIT_Compiler.h"

#include "DSi.h"
#include "GPU.h"
#include "GPU3D.h"
#include "Wifi.h"
#include "NDSCart.h"
#include "SPU.h"

#include <stdlib.h>
/*
    We're handling fastmem here.

    Basically we're repurposing a big piece of virtual memory
    and mapping the memory regions into it, laid out the same
    way they're structured on the DS.

    On most systems you have a single piece of main RAM,
    maybe some video RAM and faster cache RAM, and that's about it.
    Here we have not only a lot more distinct memory regions,
    but also two address spaces. Not only that, but they all have
    mirrors (the worst case is the 16 KB SWRAM, which is mirrored 1024x).

    We handle this by only mapping those regions which are actually
    used and by praying the games don't go wild.

    Beware, this file is full of platform-specific code, some of it
    copied from Dolphin, so enjoy the copied comments!
*/
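
/*
    A rough sketch of the mechanism (illustrative only, not the exact code
    the JIT emits): an emulated ARM9 load from, say, 0x02000123 turns into
    a single host load relative to FastMem9Start, something like

        value = *(u32*)((u8*)FastMem9Start + emulatedAddr);

    If that page isn't mapped yet (or has been write-protected because it
    contains translated code), the host access faults. The platform fault
    handlers below turn the host fault back into an emulated address and
    call FaultHandler(), which either maps the missing mirror and retries,
    or rewrites the access to the slow path.
*/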

namespace ARMJIT_Memory
{
struct FaultDescription
{
    u32 EmulatedFaultAddr;
    u8* FaultPC;
};

bool FaultHandler(FaultDescription& faultDesc);
}

#if defined(__ANDROID__)
#define ASHMEM_DEVICE "/dev/ashmem"
#endif
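
// Each platform gets its own way of catching the fastmem faults:
// a libnx exception handler on Switch, a vectored exception handler on
// Windows, and a SIGSEGV/SIGBUS handler everywhere else. They all just
// fill in a FaultDescription and hand it to ARMJIT_Memory::FaultHandler.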

#if defined(__SWITCH__)
// with LTO the symbols don't seem to be properly overridden
// if they're defined somewhere else

extern "C"
{

void ARM_RestoreContext(u64* registers) __attribute__((noreturn));

extern char __start__;
extern char __rodata_start;

alignas(16) u8 __nx_exception_stack[0x8000];
u64 __nx_exception_stack_size = 0x8000;

void __libnx_exception_handler(ThreadExceptionDump* ctx)
{
    ARMJIT_Memory::FaultDescription desc;
    u8* curArea = (u8*)(NDS::CurCPU == 0 ? ARMJIT_Memory::FastMem9Start : ARMJIT_Memory::FastMem7Start);
    desc.EmulatedFaultAddr = (u8*)ctx->far.x - curArea;
    desc.FaultPC = (u8*)ctx->pc.x;

    u64 integerRegisters[33];
    memcpy(integerRegisters, &ctx->cpu_gprs[0].x, 8*29);
    integerRegisters[29] = ctx->fp.x;
    integerRegisters[30] = ctx->lr.x;
    integerRegisters[31] = ctx->sp.x;
    integerRegisters[32] = ctx->pc.x;

    if (ARMJIT_Memory::FaultHandler(desc))
    {
        integerRegisters[32] = (u64)desc.FaultPC;

        ARM_RestoreContext(integerRegisters);
    }

    HandleFault(ctx->pc.x, ctx->lr.x, ctx->fp.x, ctx->far.x, ctx->error_desc);
}

}

#elif defined(_WIN32)

static LONG ExceptionHandler(EXCEPTION_POINTERS* exceptionInfo)
{
    if (exceptionInfo->ExceptionRecord->ExceptionCode != EXCEPTION_ACCESS_VIOLATION)
    {
        return EXCEPTION_CONTINUE_SEARCH;
    }

    ARMJIT_Memory::FaultDescription desc;
    u8* curArea = (u8*)(NDS::CurCPU == 0 ? ARMJIT_Memory::FastMem9Start : ARMJIT_Memory::FastMem7Start);
    desc.EmulatedFaultAddr = (u8*)exceptionInfo->ExceptionRecord->ExceptionInformation[1] - curArea;
    desc.FaultPC = (u8*)exceptionInfo->ContextRecord->Rip;

    if (ARMJIT_Memory::FaultHandler(desc))
    {
        exceptionInfo->ContextRecord->Rip = (u64)desc.FaultPC;
        return EXCEPTION_CONTINUE_EXECUTION;
    }

    return EXCEPTION_CONTINUE_SEARCH;
}

#else

static struct sigaction OldSaSegv;
static struct sigaction OldSaBus;

static void SigsegvHandler(int sig, siginfo_t* info, void* rawContext)
{
    if (sig != SIGSEGV && sig != SIGBUS)
    {
        // We are not interested in other signals - handle it as usual.
        return;
    }
    if (info->si_code != SEGV_MAPERR && info->si_code != SEGV_ACCERR)
    {
        // Huh? Return.
        return;
    }

    ucontext_t* context = (ucontext_t*)rawContext;

    ARMJIT_Memory::FaultDescription desc;
    u8* curArea = (u8*)(NDS::CurCPU == 0 ? ARMJIT_Memory::FastMem9Start : ARMJIT_Memory::FastMem7Start);
#ifdef __x86_64__
    desc.EmulatedFaultAddr = (u8*)info->si_addr - curArea;
#if defined(__APPLE__)
    desc.FaultPC = (u8*)context->uc_mcontext->__ss.__rip;
#elif defined(__FreeBSD__)
    desc.FaultPC = (u8*)context->uc_mcontext.mc_rip;
#elif defined(__NetBSD__)
    desc.FaultPC = (u8*)context->uc_mcontext.__gregs[_REG_RIP];
#else
    desc.FaultPC = (u8*)context->uc_mcontext.gregs[REG_RIP];
#endif

#else
#ifdef __APPLE__
    desc.EmulatedFaultAddr = (u8*)context->uc_mcontext->__es.__far - curArea;
    desc.FaultPC = (u8*)context->uc_mcontext->__ss.__pc;
#else
    desc.EmulatedFaultAddr = (u8*)context->uc_mcontext.fault_address - curArea;
    desc.FaultPC = (u8*)context->uc_mcontext.pc;
#endif
#endif

    if (ARMJIT_Memory::FaultHandler(desc))
    {
#ifdef __x86_64__
#if defined(__APPLE__)
        context->uc_mcontext->__ss.__rip = (u64)desc.FaultPC;
#elif defined(__FreeBSD__)
        context->uc_mcontext.mc_rip = (u64)desc.FaultPC;
#elif defined(__NetBSD__)
        context->uc_mcontext.__gregs[_REG_RIP] = (u64)desc.FaultPC;
#else
        context->uc_mcontext.gregs[REG_RIP] = (u64)desc.FaultPC;
#endif
#else
#ifdef __APPLE__
        context->uc_mcontext->__ss.__pc = (u64)desc.FaultPC;
#else
        context->uc_mcontext.pc = (u64)desc.FaultPC;
#endif
#endif
        return;
    }
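
    // The fault wasn't ours: restore/forward to whichever SIGSEGV/SIGBUS
    // handler was installed before us (e.g. a crash reporter or the host
    // runtime), so we don't swallow genuine crashes.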

    struct sigaction* oldSa;
    if (sig == SIGSEGV)
        oldSa = &OldSaSegv;
    else
        oldSa = &OldSaBus;

    if (oldSa->sa_flags & SA_SIGINFO)
    {
        oldSa->sa_sigaction(sig, info, rawContext);
        return;
    }
    if (oldSa->sa_handler == SIG_DFL)
    {
        signal(sig, SIG_DFL);
        return;
    }
    if (oldSa->sa_handler == SIG_IGN)
    {
        // Ignore signal
        return;
    }
    oldSa->sa_handler(sig);
}

#endif

namespace ARMJIT_Memory
{

void* FastMem9Start, *FastMem7Start;

#ifdef _WIN32
inline u32 RoundUp(u32 size)
{
    return (size + 0xFFFF) & ~0xFFFF;
}
#else
inline u32 RoundUp(u32 size)
{
    return size;
}
#endif
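
// Note (assumption based on the Win32 path above): MapViewOfFileEx can only
// map a view at offsets aligned to the 64 KB allocation granularity, which
// is most likely why the per-block offsets below are rounded up to 0x10000
// on Windows and left untouched elsewhere.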

const u32 MemBlockMainRAMOffset = 0;
const u32 MemBlockSWRAMOffset = RoundUp(NDS::MainRAMMaxSize);
const u32 MemBlockARM7WRAMOffset = MemBlockSWRAMOffset + RoundUp(NDS::SharedWRAMSize);
const u32 MemBlockDTCMOffset = MemBlockARM7WRAMOffset + RoundUp(NDS::ARM7WRAMSize);
const u32 MemBlockNWRAM_AOffset = MemBlockDTCMOffset + RoundUp(DTCMPhysicalSize);
const u32 MemBlockNWRAM_BOffset = MemBlockNWRAM_AOffset + RoundUp(DSi::NWRAMSize);
const u32 MemBlockNWRAM_COffset = MemBlockNWRAM_BOffset + RoundUp(DSi::NWRAMSize);
const u32 MemoryTotalSize = MemBlockNWRAM_COffset + RoundUp(DSi::NWRAMSize);
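
// All emulated RAM lives in one contiguous backing block, laid out as:
// main RAM | shared WRAM | ARM7 WRAM | DTCM | NWRAM A | NWRAM B | NWRAM C.
// The fastmem arenas then map windows of this block at the DS addresses.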

const u32 OffsetsPerRegion[memregions_Count] =
{
    UINT32_MAX,
    UINT32_MAX,
    MemBlockDTCMOffset,
    UINT32_MAX,
    MemBlockMainRAMOffset,
    MemBlockSWRAMOffset,
    UINT32_MAX,
    UINT32_MAX,
    UINT32_MAX,
    MemBlockARM7WRAMOffset,
    UINT32_MAX,
    UINT32_MAX,
    UINT32_MAX,
    UINT32_MAX,
    UINT32_MAX,
    MemBlockNWRAM_AOffset,
    MemBlockNWRAM_BOffset,
    MemBlockNWRAM_COffset
};

enum
{
    memstate_Unmapped,
    memstate_MappedRW,
    // on Switch this is unmapped as well
    memstate_MappedProtected,
};

u8 MappingStatus9[1 << (32-12)];
u8 MappingStatus7[1 << (32-12)];
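
// One status byte per 4 KB page of the 32-bit address space, so each of
// these tables is 1 MB (1 << 20 entries). They track whether a page is
// currently unmapped, mapped read/write, or mapped but write-protected
// because it contains translated code.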

#if defined(__SWITCH__)
VirtmemReservation* FastMem9Reservation, *FastMem7Reservation;
u8* MemoryBase;
u8* MemoryBaseCodeMem;
#elif defined(_WIN32)
u8* MemoryBase;
HANDLE MemoryFile;
LPVOID ExceptionHandlerHandle;
#else
u8* MemoryBase;
int MemoryFile;
#endif

bool MapIntoRange(u32 addr, u32 num, u32 offset, u32 size)
{
    u8* dst = (u8*)(num == 0 ? FastMem9Start : FastMem7Start) + addr;
#ifdef __SWITCH__
    Result r = (svcMapProcessMemory(dst, envGetOwnProcessHandle(),
        (u64)(MemoryBaseCodeMem + offset), size));
    return R_SUCCEEDED(r);
#elif defined(_WIN32)
    bool r = MapViewOfFileEx(MemoryFile, FILE_MAP_READ | FILE_MAP_WRITE, 0, offset, size, dst) == dst;
    return r;
#else
    return mmap(dst, size, PROT_READ | PROT_WRITE, MAP_SHARED | MAP_FIXED, MemoryFile, offset) != MAP_FAILED;
#endif
}

bool UnmapFromRange(u32 addr, u32 num, u32 offset, u32 size)
{
    u8* dst = (u8*)(num == 0 ? FastMem9Start : FastMem7Start) + addr;
#ifdef __SWITCH__
    Result r = svcUnmapProcessMemory(dst, envGetOwnProcessHandle(),
        (u64)(MemoryBaseCodeMem + offset), size);
    return R_SUCCEEDED(r);
#elif defined(_WIN32)
    return UnmapViewOfFile(dst);
#else
    return munmap(dst, size) == 0;
#endif
}
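
// MapIntoRange/UnmapFromRange place (or remove) a window of the shared
// backing block at a DS address inside the ARM9 or ARM7 fastmem arena.
// `offset` is the position within the backing block, i.e.
// OffsetsPerRegion[region] plus the offset inside that region.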

#ifndef __SWITCH__
void SetCodeProtectionRange(u32 addr, u32 size, u32 num, int protection)
{
    u8* dst = (u8*)(num == 0 ? FastMem9Start : FastMem7Start) + addr;
#if defined(_WIN32)
    DWORD winProtection, oldProtection;
    if (protection == 0)
        winProtection = PAGE_NOACCESS;
    else if (protection == 1)
        winProtection = PAGE_READONLY;
    else
        winProtection = PAGE_READWRITE;
    bool success = VirtualProtect(dst, size, winProtection, &oldProtection);
    assert(success);
#else
    int posixProt;
    if (protection == 0)
        posixProt = PROT_NONE;
    else if (protection == 1)
        posixProt = PROT_READ;
    else
        posixProt = PROT_READ | PROT_WRITE;
    mprotect(dst, size, posixProt);
#endif
}
#endif
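
// A Mapping describes one mirror that is currently mapped into a fastmem
// arena: the DS address it starts at, its size, the offset of the data
// within its region's backing block, and which CPU (0 = ARM9, 1 = ARM7)
// it belongs to. Unmap() tears it down page by page, skipping whatever
// DTCM currently overlaps, since DTCM shadows other regions on the ARM9.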
struct Mapping
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
u32 Addr;
|
|
|
|
u32 Size, LocalOffset;
|
|
|
|
u32 Num;
|
|
|
|
|
|
|
|
void Unmap(int region)
|
|
|
|
{
|
2020-11-13 14:20:53 +00:00
|
|
|
u32 dtcmStart = NDS::ARM9->DTCMBase;
|
|
|
|
u32 dtcmSize = NDS::ARM9->DTCMSize;
|
2020-07-23 15:43:25 +00:00
|
|
|
bool skipDTCM = Num == 0 && region != memregion_DTCM;
|
|
|
|
u8* statuses = Num == 0 ? MappingStatus9 : MappingStatus7;
|
|
|
|
u32 offset = 0;
|
|
|
|
while (offset < Size)
|
|
|
|
{
|
2020-11-13 14:20:53 +00:00
|
|
|
if (skipDTCM && Addr + offset == dtcmStart)
|
2020-07-23 15:43:25 +00:00
|
|
|
{
|
2020-11-13 14:20:53 +00:00
|
|
|
offset += dtcmSize;
|
2020-07-23 15:43:25 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
u32 segmentOffset = offset;
|
|
|
|
u8 status = statuses[(Addr + offset) >> 12];
|
|
|
|
while (statuses[(Addr + offset) >> 12] == status
|
|
|
|
&& offset < Size
|
2020-11-13 14:20:53 +00:00
|
|
|
&& (!skipDTCM || Addr + offset != dtcmStart))
|
2020-07-23 15:43:25 +00:00
|
|
|
{
|
|
|
|
assert(statuses[(Addr + offset) >> 12] != memstate_Unmapped);
|
|
|
|
statuses[(Addr + offset) >> 12] = memstate_Unmapped;
|
|
|
|
offset += 0x1000;
|
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-06-30 21:50:41 +00:00
|
|
|
#ifdef __SWITCH__
|
2020-07-23 15:43:25 +00:00
|
|
|
if (status == memstate_MappedRW)
|
|
|
|
{
|
|
|
|
u32 segmentSize = offset - segmentOffset;
|
|
|
|
printf("unmapping %x %x %x %x\n", Addr + segmentOffset, Num, segmentOffset + LocalOffset + OffsetsPerRegion[region], segmentSize);
|
|
|
|
bool success = UnmapFromRange(Addr + segmentOffset, Num, segmentOffset + LocalOffset + OffsetsPerRegion[region], segmentSize);
|
|
|
|
assert(success);
|
|
|
|
}
|
2020-06-30 21:50:41 +00:00
|
|
|
#endif
|
2020-07-23 15:43:25 +00:00
|
|
|
}
|
|
|
|
}
|
2020-11-13 14:20:53 +00:00
|
|
|
|
2020-07-04 16:58:00 +00:00
|
|
|
#ifndef __SWITCH__
|
2020-11-13 14:20:53 +00:00
|
|
|
#ifndef _WIN32
|
|
|
|
u32 dtcmEnd = dtcmStart + dtcmSize;
|
|
|
|
if (Num == 0
|
|
|
|
&& dtcmEnd >= Addr
|
|
|
|
&& dtcmStart < Addr + Size)
|
|
|
|
{
|
|
|
|
bool success;
|
|
|
|
if (dtcmStart > Addr)
|
|
|
|
{
|
|
|
|
success = UnmapFromRange(Addr, 0, OffsetsPerRegion[region] + LocalOffset, dtcmStart - Addr);
|
|
|
|
assert(success);
|
|
|
|
}
|
|
|
|
if (dtcmEnd < Addr + Size)
|
|
|
|
{
|
|
|
|
u32 offset = dtcmStart - Addr + dtcmSize;
|
|
|
|
success = UnmapFromRange(dtcmEnd, 0, OffsetsPerRegion[region] + LocalOffset + offset, Size - offset);
|
|
|
|
assert(success);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
#endif
|
|
|
|
{
|
|
|
|
bool succeeded = UnmapFromRange(Addr, Num, OffsetsPerRegion[region] + LocalOffset, Size);
assert(succeeded);
|
|
|
|
}
|
2020-06-30 21:50:41 +00:00
|
|
|
#endif
|
2020-07-23 15:43:25 +00:00
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
};
|
|
|
|
ARMJIT::TinyVector<Mapping> Mappings[memregions_Count];
|
|
|
|
|
|
|
|
void SetCodeProtection(int region, u32 offset, bool protect)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
offset &= ~0xFFF;
|
2020-07-28 23:31:57 +00:00
|
|
|
//printf("set code protection %d %x %d\n", region, offset, protect);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
for (int i = 0; i < Mappings[region].Length; i++)
|
|
|
|
{
|
|
|
|
Mapping& mapping = Mappings[region][i];
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
if (offset < mapping.LocalOffset || offset >= mapping.LocalOffset + mapping.Size)
|
|
|
|
continue;
|
2020-07-08 21:08:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
u32 effectiveAddr = mapping.Addr + (offset - mapping.LocalOffset);
|
|
|
|
if (mapping.Num == 0
|
|
|
|
&& region != memregion_DTCM
|
|
|
|
&& effectiveAddr >= NDS::ARM9->DTCMBase
|
|
|
|
&& effectiveAddr < (NDS::ARM9->DTCMBase + NDS::ARM9->DTCMSize))
|
|
|
|
continue;
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
u8* states = (u8*)(mapping.Num == 0 ? MappingStatus9 : MappingStatus7);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-28 23:31:57 +00:00
|
|
|
//printf("%x %d %x %x %x %d\n", effectiveAddr, mapping.Num, mapping.Addr, mapping.LocalOffset, mapping.Size, states[effectiveAddr >> 12]);
|
2020-07-23 15:43:25 +00:00
|
|
|
assert(states[effectiveAddr >> 12] == (protect ? memstate_MappedRW : memstate_MappedProtected));
|
|
|
|
states[effectiveAddr >> 12] = protect ? memstate_MappedProtected : memstate_MappedRW;
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-06-30 21:50:41 +00:00
|
|
|
#if defined(__SWITCH__)
|
2020-07-23 15:43:25 +00:00
|
|
|
bool success;
|
|
|
|
if (protect)
|
|
|
|
success = UnmapFromRange(effectiveAddr, mapping.Num, OffsetsPerRegion[region] + offset, 0x1000);
|
|
|
|
else
|
|
|
|
success = MapIntoRange(effectiveAddr, mapping.Num, OffsetsPerRegion[region] + offset, 0x1000);
|
|
|
|
assert(success);
|
2020-07-04 16:58:00 +00:00
|
|
|
#else
|
2020-07-23 15:43:25 +00:00
|
|
|
SetCodeProtectionRange(effectiveAddr, 0x1000, mapping.Num, protect ? 1 : 2);
|
2020-06-30 21:50:41 +00:00
|
|
|
#endif
|
2020-07-23 15:43:25 +00:00
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void RemapDTCM(u32 newBase, u32 newSize)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
// this first part could be made more efficient
// by unmapping DTCM first and then mapping the holes
|
|
|
|
u32 oldDTCMBase = NDS::ARM9->DTCMBase;
|
|
|
|
u32 oldDTCBEnd = oldDTCMBase + NDS::ARM9->DTCMSize;
|
|
|
|
|
|
|
|
u32 newEnd = newBase + newSize;
|
|
|
|
|
|
|
|
printf("remapping DTCM %x %x %x %x\n", newBase, newEnd, oldDTCMBase, oldDTCBEnd);
|
|
|
|
// unmap all regions overlapping the old or the new DTCM range
|
|
|
|
for (int region = 0; region < memregions_Count; region++)
|
|
|
|
{
|
|
|
|
if (region == memregion_DTCM)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
for (int i = 0; i < Mappings[region].Length;)
|
|
|
|
{
|
|
|
|
Mapping& mapping = Mappings[region][i];
|
|
|
|
|
|
|
|
u32 start = mapping.Addr;
|
|
|
|
u32 end = mapping.Addr + mapping.Size;
|
|
|
|
|
2020-07-31 20:39:27 +00:00
|
|
|
printf("unmapping %d %x %x %x %x\n", region, mapping.Addr, mapping.Size, mapping.Num, mapping.LocalOffset);
|
2020-07-23 15:43:25 +00:00
|
|
|
|
2020-11-13 14:20:53 +00:00
|
|
|
bool overlap = (NDS::ARM9->DTCMSize > 0 && oldDTCMBase < end && oldDTCBEnd > start)
|
|
|
|
|| (newSize > 0 && newBase < end && newEnd > start);
|
2020-07-23 15:43:25 +00:00
|
|
|
|
2020-11-13 14:20:53 +00:00
|
|
|
if (mapping.Num == 0 && overlap)
|
2020-07-23 15:43:25 +00:00
|
|
|
{
|
|
|
|
mapping.Unmap(region);
|
|
|
|
Mappings[region].Remove(i);
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
i++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (int i = 0; i < Mappings[memregion_DTCM].Length; i++)
|
|
|
|
{
|
|
|
|
Mappings[memregion_DTCM][i].Unmap(memregion_DTCM);
|
|
|
|
}
|
|
|
|
Mappings[memregion_DTCM].Clear();
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
2020-06-30 21:50:41 +00:00
|
|
|
void RemapNWRAM(int num)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
for (int i = 0; i < Mappings[memregion_SharedWRAM].Length;)
|
|
|
|
{
|
|
|
|
Mapping& mapping = Mappings[memregion_SharedWRAM][i];
|
2020-11-13 14:20:53 +00:00
|
|
|
if (DSi::NWRAMStart[mapping.Num][num] < mapping.Addr + mapping.Size
|
|
|
|
&& DSi::NWRAMEnd[mapping.Num][num] > mapping.Addr)
|
2020-07-23 15:43:25 +00:00
|
|
|
{
|
|
|
|
mapping.Unmap(memregion_SharedWRAM);
|
|
|
|
Mappings[memregion_SharedWRAM].Remove(i);
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
i++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for (int i = 0; i < Mappings[memregion_NewSharedWRAM_A + num].Length; i++)
|
|
|
|
{
|
|
|
|
Mappings[memregion_NewSharedWRAM_A + num][i].Unmap(memregion_NewSharedWRAM_A + num);
|
|
|
|
}
|
|
|
|
Mappings[memregion_NewSharedWRAM_A + num].Clear();
|
2020-06-30 21:50:41 +00:00
|
|
|
}
|
|
|
|
|
2020-06-14 19:04:25 +00:00
|
|
|
void RemapSWRAM()
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
printf("remapping SWRAM\n");
|
|
|
|
for (int i = 0; i < Mappings[memregion_WRAM7].Length;)
|
|
|
|
{
|
|
|
|
Mapping& mapping = Mappings[memregion_WRAM7][i];
|
2020-11-13 14:20:53 +00:00
|
|
|
if (mapping.Addr + mapping.Size <= 0x03800000)
|
2020-07-23 15:43:25 +00:00
|
|
|
{
|
|
|
|
mapping.Unmap(memregion_WRAM7);
|
|
|
|
Mappings[memregion_WRAM7].Remove(i);
|
|
|
|
}
|
|
|
|
else
|
|
|
|
i++;
|
|
|
|
}
|
|
|
|
for (int i = 0; i < Mappings[memregion_SharedWRAM].Length; i++)
|
|
|
|
{
|
|
|
|
Mappings[memregion_SharedWRAM][i].Unmap(memregion_SharedWRAM);
|
|
|
|
}
|
|
|
|
Mappings[memregion_SharedWRAM].Clear();
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
bool MapAtAddress(u32 addr)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
u32 num = NDS::CurCPU;
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
int region = num == 0
|
|
|
|
? ClassifyAddress9(addr)
|
|
|
|
: ClassifyAddress7(addr);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
if (!IsFastmemCompatible(region))
|
|
|
|
return false;
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
u32 mirrorStart, mirrorSize, memoryOffset;
|
|
|
|
bool isMapped = GetMirrorLocation(region, num, addr, memoryOffset, mirrorStart, mirrorSize);
|
|
|
|
if (!isMapped)
|
|
|
|
return false;
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
u8* states = num == 0 ? MappingStatus9 : MappingStatus7;
|
2020-11-13 14:20:53 +00:00
|
|
|
printf("mapping mirror %x, %x %x %d %d\n", mirrorStart, mirrorSize, memoryOffset, region, num);
|
2020-07-23 15:43:25 +00:00
|
|
|
bool isExecutable = ARMJIT::CodeMemRegions[region];
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-11-13 14:20:53 +00:00
|
|
|
u32 dtcmStart = NDS::ARM9->DTCMBase;
|
|
|
|
u32 dtcmSize = NDS::ARM9->DTCMSize;
|
|
|
|
u32 dtcmEnd = dtcmStart + dtcmSize;
|
2020-07-04 16:58:00 +00:00
|
|
|
#ifndef __SWITCH__
|
2020-11-13 14:20:53 +00:00
|
|
|
#ifndef _WIN32
|
|
|
|
if (num == 0
|
|
|
|
&& dtcmEnd >= mirrorStart
|
|
|
|
&& dtcmStart < mirrorStart + mirrorSize)
|
2020-11-09 19:43:31 +00:00
|
|
|
{
|
2020-11-13 14:20:53 +00:00
|
|
|
bool success;
|
|
|
|
if (dtcmStart > mirrorStart)
|
2020-11-09 19:43:31 +00:00
|
|
|
{
|
2020-11-13 14:20:53 +00:00
|
|
|
success = MapIntoRange(mirrorStart, 0, OffsetsPerRegion[region] + memoryOffset, dtcmStart - mirrorStart);
|
|
|
|
assert(success);
|
|
|
|
}
|
|
|
|
if (dtcmEnd < mirrorStart + mirrorSize)
|
|
|
|
{
|
|
|
|
u32 offset = dtcmStart - mirrorStart + dtcmSize;
|
|
|
|
success = MapIntoRange(dtcmEnd, 0, OffsetsPerRegion[region] + memoryOffset + offset, mirrorSize - offset);
|
|
|
|
assert(success);
|
2020-11-09 19:43:31 +00:00
|
|
|
}
|
|
|
|
}
|
2020-11-13 14:20:53 +00:00
|
|
|
else
|
|
|
|
#endif
|
|
|
|
{
|
|
|
|
bool succeeded = MapIntoRange(mirrorStart, num, OffsetsPerRegion[region] + memoryOffset, mirrorSize);
assert(succeeded);
|
|
|
|
}
|
2020-06-30 21:50:41 +00:00
|
|
|
#endif
|
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
ARMJIT::AddressRange* range = ARMJIT::CodeMemRegions[region] + memoryOffset / 512;
|
|
|
|
|
|
|
|
// this overcomplicated piece of code basically just finds whole pieces of code memory
|
2020-11-13 14:20:53 +00:00
|
|
|
// which can be mapped/protected
|
2020-07-23 15:43:25 +00:00
|
|
|
u32 offset = 0;
|
|
|
|
bool skipDTCM = num == 0 && region != memregion_DTCM;
|
|
|
|
while (offset < mirrorSize)
|
|
|
|
{
|
2020-11-13 14:20:53 +00:00
|
|
|
if (skipDTCM && mirrorStart + offset == dtcmStart)
|
2020-07-23 15:43:25 +00:00
|
|
|
{
|
2020-11-13 14:20:53 +00:00
|
|
|
#ifdef _WIN32
|
|
|
|
SetCodeProtectionRange(dtcmStart, dtcmSize, 0, 0);
|
|
|
|
#endif
|
|
|
|
offset += dtcmSize;
|
2020-07-23 15:43:25 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
u32 sectionOffset = offset;
|
|
|
|
bool hasCode = isExecutable && ARMJIT::PageContainsCode(&range[offset / 512]);
|
2020-08-25 16:13:17 +00:00
|
|
|
while (offset < mirrorSize
|
|
|
|
&& (!isExecutable || ARMJIT::PageContainsCode(&range[offset / 512]) == hasCode)
|
2020-07-23 15:43:25 +00:00
|
|
|
&& (!skipDTCM || mirrorStart + offset != NDS::ARM9->DTCMBase))
|
|
|
|
{
|
|
|
|
assert(states[(mirrorStart + offset) >> 12] == memstate_Unmapped);
|
|
|
|
states[(mirrorStart + offset) >> 12] = hasCode ? memstate_MappedProtected : memstate_MappedRW;
|
|
|
|
offset += 0x1000;
|
|
|
|
}
|
|
|
|
|
|
|
|
u32 sectionSize = offset - sectionOffset;
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-06-30 21:50:41 +00:00
|
|
|
#if defined(__SWITCH__)
|
2020-07-23 15:43:25 +00:00
|
|
|
if (!hasCode)
|
|
|
|
{
|
|
|
|
printf("trying to map %x (size: %x) from %x\n", mirrorStart + sectionOffset, sectionSize, sectionOffset + memoryOffset + OffsetsPerRegion[region]);
|
|
|
|
bool succeeded = MapIntoRange(mirrorStart + sectionOffset, num, sectionOffset + memoryOffset + OffsetsPerRegion[region], sectionSize);
assert(succeeded);
|
|
|
|
}
|
2020-07-04 16:58:00 +00:00
|
|
|
#else
|
2020-07-23 15:43:25 +00:00
|
|
|
if (hasCode)
|
|
|
|
{
|
|
|
|
SetCodeProtectionRange(mirrorStart + sectionOffset, sectionSize, num, 1);
|
|
|
|
}
|
2020-06-30 21:50:41 +00:00
|
|
|
#endif
|
2020-07-23 15:43:25 +00:00
|
|
|
}
|
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
assert(num == 0 || num == 1);
|
|
|
|
Mapping mapping{mirrorStart, mirrorSize, memoryOffset, num};
|
|
|
|
Mappings[region].Add(mapping);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-11-13 14:20:53 +00:00
|
|
|
//printf("mapped mirror at %08x-%08x\n", mirrorStart, mirrorStart + mirrorSize - 1);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
return true;
|
2020-06-14 19:04:25 +00:00
|
|
|
}
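
// MapAtAddress (above): classify the faulting DS address, find the mirror
// it belongs to, then map that mirror into the fastmem arena piece by piece,
// leaving out whatever DTCM overlaps and write-protecting pages that contain
// translated code. Returns false for regions that can't be direct-mapped,
// in which case the access stays on the slow path.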
|
|
|
|
|
2020-11-09 19:43:31 +00:00
|
|
|
bool FaultHandler(FaultDescription& faultDesc)
|
2020-06-14 19:04:25 +00:00
|
|
|
{
|
2020-11-09 19:43:31 +00:00
|
|
|
if (ARMJIT::JITCompiler->IsJITFault(faultDesc.FaultPC))
|
2020-07-23 15:43:25 +00:00
|
|
|
{
|
|
|
|
bool rewriteToSlowPath = true;
|
|
|
|
|
2020-11-09 19:43:31 +00:00
|
|
|
u8* memStatus = NDS::CurCPU == 0 ? MappingStatus9 : MappingStatus7;
|
2020-07-23 15:43:25 +00:00
|
|
|
|
2020-11-09 19:43:31 +00:00
|
|
|
if (memStatus[faultDesc.EmulatedFaultAddr >> 12] == memstate_Unmapped)
|
|
|
|
rewriteToSlowPath = !MapAtAddress(faultDesc.EmulatedFaultAddr);
|
2020-07-23 15:43:25 +00:00
|
|
|
|
|
|
|
if (rewriteToSlowPath)
|
2020-11-09 19:43:31 +00:00
|
|
|
faultDesc.FaultPC = ARMJIT::JITCompiler->RewriteMemAccess(faultDesc.FaultPC);
|
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
2020-12-09 17:58:51 +00:00
|
|
|
const u64 AddrSpaceSize = 0x100000000;
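// 4 GB per arena: the full 32-bit emulated address can simply be added to
// the arena base without any masking.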
|
|
|
|
|
2020-06-14 19:04:25 +00:00
|
|
|
void Init()
|
|
|
|
{
|
|
|
|
#if defined(__SWITCH__)
|
2020-12-09 17:58:51 +00:00
|
|
|
MemoryBase = (u8*)aligned_alloc(0x1000, MemoryTotalSize);
|
2021-01-05 13:36:15 +00:00
|
|
|
virtmemLock();
|
|
|
|
MemoryBaseCodeMem = (u8*)virtmemFindCodeMemory(MemoryTotalSize, 0x1000);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
|
|
|
bool succeeded = R_SUCCEEDED(svcMapProcessCodeMemory(envGetOwnProcessHandle(), (u64)MemoryBaseCodeMem,
(u64)MemoryBase, MemoryTotalSize));
assert(succeeded);
succeeded = R_SUCCEEDED(svcSetProcessMemoryPermission(envGetOwnProcessHandle(), (u64)MemoryBaseCodeMem,
MemoryTotalSize, Perm_Rw));
assert(succeeded);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
// 8 GB of address space, just don't ask...
|
2021-01-05 13:36:15 +00:00
|
|
|
FastMem9Start = virtmemFindAslr(AddrSpaceSize, 0x1000);
|
2020-07-23 15:43:25 +00:00
|
|
|
assert(FastMem9Start);
|
2021-01-05 13:36:15 +00:00
|
|
|
FastMem7Start = virtmemFindAslr(AddrSpaceSize, 0x1000);
|
2020-07-23 15:43:25 +00:00
|
|
|
assert(FastMem7Start);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2021-01-05 13:36:15 +00:00
|
|
|
FastMem9Reservation = virtmemAddReservation(FastMem9Start, AddrSpaceSize);
|
|
|
|
FastMem7Reservation = virtmemAddReservation(FastMem7Start, AddrSpaceSize);
|
|
|
|
virtmemUnlock();
|
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
u8* basePtr = MemoryBaseCodeMem;
|
2020-06-30 21:50:41 +00:00
|
|
|
#elif defined(_WIN32)
|
2020-07-23 15:43:25 +00:00
|
|
|
ExceptionHandlerHandle = AddVectoredExceptionHandler(1, ExceptionHandler);
|
2020-06-30 21:50:41 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
MemoryFile = CreateFileMapping(INVALID_HANDLE_VALUE, NULL, PAGE_READWRITE, 0, MemoryTotalSize, NULL);
|
2020-06-14 19:04:25 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
MemoryBase = (u8*)VirtualAlloc(NULL, MemoryTotalSize, MEM_RESERVE, PAGE_READWRITE);
|
2020-06-30 21:50:41 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
FastMem9Start = VirtualAlloc(NULL, AddrSpaceSize, MEM_RESERVE, PAGE_READWRITE);
|
|
|
|
FastMem7Start = VirtualAlloc(NULL, AddrSpaceSize, MEM_RESERVE, PAGE_READWRITE);
|
2020-06-30 21:50:41 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
// only free them after they have all been reserved
|
|
|
|
// so they can't overlap
|
|
|
|
VirtualFree(MemoryBase, 0, MEM_RELEASE);
|
|
|
|
VirtualFree(FastMem9Start, 0, MEM_RELEASE);
|
|
|
|
VirtualFree(FastMem7Start, 0, MEM_RELEASE);
|
2020-06-30 21:50:41 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
MapViewOfFileEx(MemoryFile, FILE_MAP_READ | FILE_MAP_WRITE, 0, 0, MemoryTotalSize, MemoryBase);
|
2020-06-30 21:50:41 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
u8* basePtr = MemoryBase;
|
2020-07-04 16:58:00 +00:00
|
|
|
#else
|
2020-11-09 19:43:31 +00:00
|
|
|
// this used to be allocated with three different mmaps
// The idea was to give the OS more freedom where to position the buffers,
// but something was bad about this, so instead we take this single
// vmem-eating monster, which seems to work better.
|
|
|
|
MemoryBase = (u8*)mmap(NULL, AddrSpaceSize*4, PROT_NONE, MAP_ANON | MAP_PRIVATE, -1, 0);
|
|
|
|
munmap(MemoryBase, AddrSpaceSize*4);
|
|
|
|
FastMem9Start = MemoryBase;
|
|
|
|
FastMem7Start = MemoryBase + AddrSpaceSize;
|
|
|
|
MemoryBase = MemoryBase + AddrSpaceSize*2;
|
2020-07-04 16:58:00 +00:00
|
|
|
|
2020-11-30 14:33:43 +00:00
|
|
|
#if defined(__ANDROID__)
|
|
|
|
static void* libandroid = dlopen("libandroid.so", RTLD_LAZY | RTLD_LOCAL);
|
|
|
|
using type_ASharedMemory_create = int(*)(const char* name, size_t size);
|
|
|
|
static void* symbol = dlsym(libandroid, "ASharedMemory_create");
|
|
|
|
static auto shared_memory_create = reinterpret_cast<type_ASharedMemory_create>(symbol);
|
|
|
|
|
|
|
|
if (shared_memory_create)
|
|
|
|
{
|
|
|
|
MemoryFile = shared_memory_create("melondsfastmem", MemoryTotalSize);
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
int fd = open(ASHMEM_DEVICE, O_RDWR);
|
|
|
|
ioctl(fd, ASHMEM_SET_NAME, "melondsfastmem");
|
|
|
|
ioctl(fd, ASHMEM_SET_SIZE, MemoryTotalSize);
|
|
|
|
MemoryFile = fd;
|
|
|
|
}
|
|
|
|
#else
|
2021-01-22 18:20:32 +00:00
|
|
|
char fastmemPidName[snprintf(NULL, 0, "/melondsfastmem%d", getpid()) + 1];
|
|
|
|
sprintf(fastmemPidName, "/melondsfastmem%d", getpid());
|
|
|
|
MemoryFile = shm_open(fastmemPidName, O_RDWR | O_CREAT | O_EXCL, 0600);
|
|
|
|
if (MemoryFile == -1)
|
|
|
|
{
|
|
|
|
printf("Failed to open memory using shm_open!");
|
|
|
|
}
|
|
|
|
shm_unlink(fastmemPidName);
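// unlink right away: the mapping stays alive through the fd, but the name
// disappears so nothing is leaked if we crash before DeInit()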
|
2020-11-30 14:33:43 +00:00
|
|
|
#endif
|
2021-01-22 18:20:32 +00:00
|
|
|
if (ftruncate(MemoryFile, MemoryTotalSize) < 0)
|
|
|
|
{
|
|
|
|
printf("Failed to allocate memory using ftruncate!");
|
|
|
|
}
|
2020-07-04 16:58:00 +00:00
|
|
|
|
2020-11-09 19:43:31 +00:00
|
|
|
struct sigaction sa;
|
|
|
|
sa.sa_handler = nullptr;
|
|
|
|
sa.sa_sigaction = &SigsegvHandler;
|
|
|
|
sa.sa_flags = SA_SIGINFO;
|
|
|
|
sigemptyset(&sa.sa_mask);
|
|
|
|
sigaction(SIGSEGV, &sa, &OldSaSegv);
|
|
|
|
#ifdef __APPLE__
|
|
|
|
sigaction(SIGBUS, &sa, &OldSaBus);
|
|
|
|
#endif
|
2020-07-04 16:58:00 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
mmap(MemoryBase, MemoryTotalSize, PROT_READ | PROT_WRITE, MAP_SHARED | MAP_FIXED, MemoryFile, 0);
|
2020-07-04 16:58:00 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
u8* basePtr = MemoryBase;
|
2020-06-14 19:04:25 +00:00
|
|
|
#endif
|
2020-07-23 15:43:25 +00:00
|
|
|
NDS::MainRAM = basePtr + MemBlockMainRAMOffset;
|
|
|
|
NDS::SharedWRAM = basePtr + MemBlockSWRAMOffset;
|
|
|
|
NDS::ARM7WRAM = basePtr + MemBlockARM7WRAMOffset;
|
|
|
|
NDS::ARM9->DTCM = basePtr + MemBlockDTCMOffset;
|
|
|
|
DSi::NWRAM_A = basePtr + MemBlockNWRAM_AOffset;
|
|
|
|
DSi::NWRAM_B = basePtr + MemBlockNWRAM_BOffset;
|
|
|
|
DSi::NWRAM_C = basePtr + MemBlockNWRAM_COffset;
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void DeInit()
|
|
|
|
{
|
|
|
|
#if defined(__SWITCH__)
|
2021-01-05 13:36:15 +00:00
|
|
|
virtmemLock();
|
|
|
|
virtmemRemoveReservation(FastMem9Reservation);
|
|
|
|
virtmemRemoveReservation(FastMem7Reservation);
|
|
|
|
virtmemUnlock();
|
2020-06-14 19:04:25 +00:00
|
|
|
|
|
|
|
svcUnmapProcessCodeMemory(envGetOwnProcessHandle(), (u64)MemoryBaseCodeMem, (u64)MemoryBase, MemoryTotalSize);
|
|
|
|
free(MemoryBase);
|
2020-06-30 21:50:41 +00:00
|
|
|
#elif defined(_WIN32)
|
2020-07-23 15:43:25 +00:00
|
|
|
assert(UnmapViewOfFile(MemoryBase));
|
|
|
|
CloseHandle(MemoryFile);
|
2020-06-30 21:50:41 +00:00
|
|
|
|
2020-07-23 15:43:25 +00:00
|
|
|
RemoveVectoredExceptionHandler(ExceptionHandlerHandle);
|
2020-11-09 19:43:31 +00:00
|
|
|
#else
|
|
|
|
sigaction(SIGSEGV, &OldSaSegv, nullptr);
|
|
|
|
#ifdef __APPLE__
|
|
|
|
sigaction(SIGBUS, &OldSaBus, nullptr);
|
|
|
|
#endif
|
|
|
|
|
|
|
|
munmap(MemoryBase, MemoryTotalSize);
|
|
|
|
close(MemoryFile);
|
2020-06-14 19:04:25 +00:00
|
|
|
#endif
|
|
|
|
}
|
|
|
|
|
|
|
|
void Reset()
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
for (int region = 0; region < memregions_Count; region++)
|
|
|
|
{
|
|
|
|
for (int i = 0; i < Mappings[region].Length; i++)
|
|
|
|
Mappings[region][i].Unmap(region);
|
|
|
|
Mappings[region].Clear();
|
|
|
|
}
|
|
|
|
|
|
|
|
for (int i = 0; i < sizeof(MappingStatus9); i++)
|
|
|
|
{
|
|
|
|
assert(MappingStatus9[i] == memstate_Unmapped);
|
|
|
|
assert(MappingStatus7[i] == memstate_Unmapped);
|
|
|
|
}
|
|
|
|
|
|
|
|
printf("done resetting jit mem\n");
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
2020-06-30 21:50:41 +00:00
|
|
|
bool IsFastmemCompatible(int region)
|
2020-06-14 19:04:25 +00:00
|
|
|
{
|
2020-06-30 21:50:41 +00:00
|
|
|
#ifdef _WIN32
|
2020-07-23 15:43:25 +00:00
|
|
|
/*
TODO: with some hacks, the smaller shared WRAM regions
could be mapped on some occasions as well
*/
|
|
|
|
if (region == memregion_DTCM
|
|
|
|
|| region == memregion_SharedWRAM
|
|
|
|
|| region == memregion_NewSharedWRAM_B
|
|
|
|
|| region == memregion_NewSharedWRAM_C)
|
|
|
|
return false;
|
2020-06-30 21:50:41 +00:00
|
|
|
#endif
|
2020-11-13 14:20:53 +00:00
|
|
|
return OffsetsPerRegion[region] != UINT32_MAX;
|
2020-06-14 19:04:25 +00:00
|
|
|
}
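
// IsFastmemCompatible (above): a region can only be direct-mapped if it has
// a slot in the big backing block (OffsetsPerRegion[region] != UINT32_MAX);
// I/O, BIOS and similar regions always take the slow path. On Windows, DTCM
// and the shared WRAM regions are additionally excluded (see the TODO above),
// presumably because their mirrors are smaller than the 64 KB mapping
// granularity.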
|
|
|
|
|
2020-06-30 21:50:41 +00:00
|
|
|
bool GetMirrorLocation(int region, u32 num, u32 addr, u32& memoryOffset, u32& mirrorStart, u32& mirrorSize)
|
2020-06-14 19:04:25 +00:00
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
memoryOffset = 0;
|
|
|
|
switch (region)
|
|
|
|
{
|
|
|
|
case memregion_ITCM:
|
|
|
|
if (num == 0)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~(ITCMPhysicalSize - 1);
|
|
|
|
mirrorSize = ITCMPhysicalSize;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_DTCM:
|
|
|
|
if (num == 0)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~(DTCMPhysicalSize - 1);
|
|
|
|
mirrorSize = DTCMPhysicalSize;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_MainRAM:
|
|
|
|
mirrorStart = addr & ~NDS::MainRAMMask;
|
|
|
|
mirrorSize = NDS::MainRAMMask + 1;
|
|
|
|
return true;
|
|
|
|
case memregion_BIOS9:
|
|
|
|
if (num == 0)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~0xFFF;
|
|
|
|
mirrorSize = 0x1000;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_BIOS7:
|
|
|
|
if (num == 1)
|
|
|
|
{
|
|
|
|
mirrorStart = 0;
|
|
|
|
mirrorSize = 0x4000;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_SharedWRAM:
|
|
|
|
if (num == 0 && NDS::SWRAM_ARM9.Mem)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~NDS::SWRAM_ARM9.Mask;
|
|
|
|
mirrorSize = NDS::SWRAM_ARM9.Mask + 1;
|
|
|
|
memoryOffset = NDS::SWRAM_ARM9.Mem - NDS::SharedWRAM;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
else if (num == 1 && NDS::SWRAM_ARM7.Mem)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~NDS::SWRAM_ARM7.Mask;
|
|
|
|
mirrorSize = NDS::SWRAM_ARM7.Mask + 1;
|
|
|
|
memoryOffset = NDS::SWRAM_ARM7.Mem - NDS::SharedWRAM;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_WRAM7:
|
|
|
|
if (num == 1)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~(NDS::ARM7WRAMSize - 1);
|
|
|
|
mirrorSize = NDS::ARM7WRAMSize;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_VRAM:
|
|
|
|
if (num == 0)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~0xFFFFF;
|
|
|
|
mirrorSize = 0x100000;
|
2020-08-09 11:29:04 +00:00
|
|
|
return true;
|
2020-07-23 15:43:25 +00:00
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_VWRAM:
|
|
|
|
if (num == 1)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~0x3FFFF;
|
|
|
|
mirrorSize = 0x40000;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_NewSharedWRAM_A:
|
|
|
|
{
|
|
|
|
u8* ptr = DSi::NWRAMMap_A[num][(addr >> 16) & DSi::NWRAMMask[num][0]];
|
|
|
|
if (ptr)
|
|
|
|
{
|
|
|
|
memoryOffset = ptr - DSi::NWRAM_A;
|
|
|
|
mirrorStart = addr & ~0xFFFF;
|
|
|
|
mirrorSize = 0x10000;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false; // zero filled memory
|
|
|
|
}
|
|
|
|
case memregion_NewSharedWRAM_B:
|
|
|
|
{
|
|
|
|
u8* ptr = DSi::NWRAMMap_B[num][(addr >> 15) & DSi::NWRAMMask[num][1]];
|
|
|
|
if (ptr)
|
|
|
|
{
|
|
|
|
memoryOffset = ptr - DSi::NWRAM_B;
|
|
|
|
mirrorStart = addr & ~0x7FFF;
|
|
|
|
mirrorSize = 0x8000;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false; // zero filled memory
|
|
|
|
}
|
|
|
|
case memregion_NewSharedWRAM_C:
|
|
|
|
{
|
|
|
|
u8* ptr = DSi::NWRAMMap_C[num][(addr >> 15) & DSi::NWRAMMask[num][2]];
|
|
|
|
if (ptr)
|
|
|
|
{
|
|
|
|
memoryOffset = ptr - DSi::NWRAM_C;
|
|
|
|
mirrorStart = addr & ~0x7FFF;
|
|
|
|
mirrorSize = 0x8000;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false; // zero filled memory
|
|
|
|
}
|
|
|
|
case memregion_BIOS9DSi:
|
|
|
|
if (num == 0)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~0xFFFF;
|
|
|
|
mirrorSize = DSi::SCFG_BIOS & (1<<0) ? 0x8000 : 0x10000;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
case memregion_BIOS7DSi:
|
|
|
|
if (num == 1)
|
|
|
|
{
|
|
|
|
mirrorStart = addr & ~0xFFFF;
|
|
|
|
mirrorSize = DSi::SCFG_BIOS & (1<<8) ? 0x8000 : 0x10000;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
default:
|
|
|
|
assert(false && "For the time being this should only be used for code");
|
|
|
|
return false;
|
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
2020-06-30 21:50:41 +00:00
|
|
|
u32 LocaliseAddress(int region, u32 num, u32 addr)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
switch (region)
|
|
|
|
{
|
|
|
|
case memregion_ITCM:
|
|
|
|
return (addr & (ITCMPhysicalSize - 1)) | (memregion_ITCM << 27);
|
|
|
|
case memregion_MainRAM:
|
|
|
|
return (addr & NDS::MainRAMMask) | (memregion_MainRAM << 27);
|
|
|
|
case memregion_BIOS9:
|
|
|
|
return (addr & 0xFFF) | (memregion_BIOS9 << 27);
|
|
|
|
case memregion_BIOS7:
|
|
|
|
return (addr & 0x3FFF) | (memregion_BIOS7 << 27);
|
|
|
|
case memregion_SharedWRAM:
|
|
|
|
if (num == 0)
|
|
|
|
return ((addr & NDS::SWRAM_ARM9.Mask) + (NDS::SWRAM_ARM9.Mem - NDS::SharedWRAM)) | (memregion_SharedWRAM << 27);
|
|
|
|
else
|
|
|
|
return ((addr & NDS::SWRAM_ARM7.Mask) + (NDS::SWRAM_ARM7.Mem - NDS::SharedWRAM)) | (memregion_SharedWRAM << 27);
|
|
|
|
case memregion_WRAM7:
|
|
|
|
return (addr & (NDS::ARM7WRAMSize - 1)) | (memregion_WRAM7 << 27);
|
|
|
|
case memregion_VRAM:
|
|
|
|
// TODO: take mapping properly into account
|
|
|
|
return (addr & 0xFFFFF) | (memregion_VRAM << 27);
|
|
|
|
case memregion_VWRAM:
|
|
|
|
// same here
|
|
|
|
return (addr & 0x3FFFF) | (memregion_VWRAM << 27);
|
|
|
|
case memregion_NewSharedWRAM_A:
|
|
|
|
{
|
|
|
|
u8* ptr = DSi::NWRAMMap_A[num][(addr >> 16) & DSi::NWRAMMask[num][0]];
|
|
|
|
if (ptr)
|
|
|
|
return (ptr - DSi::NWRAM_A + (addr & 0xFFFF)) | (memregion_NewSharedWRAM_A << 27);
|
|
|
|
else
|
|
|
|
return memregion_Other << 27; // zero filled memory
|
|
|
|
}
|
|
|
|
case memregion_NewSharedWRAM_B:
|
|
|
|
{
|
|
|
|
u8* ptr = DSi::NWRAMMap_B[num][(addr >> 15) & DSi::NWRAMMask[num][1]];
|
|
|
|
if (ptr)
|
|
|
|
return (ptr - DSi::NWRAM_B + (addr & 0x7FFF)) | (memregion_NewSharedWRAM_B << 27);
|
|
|
|
else
|
|
|
|
return memregion_Other << 27;
|
|
|
|
}
|
|
|
|
case memregion_NewSharedWRAM_C:
|
|
|
|
{
|
|
|
|
u8* ptr = DSi::NWRAMMap_C[num][(addr >> 15) & DSi::NWRAMMask[num][2]];
|
|
|
|
if (ptr)
|
|
|
|
return (ptr - DSi::NWRAM_C + (addr & 0x7FFF)) | (memregion_NewSharedWRAM_C << 27);
|
|
|
|
else
|
|
|
|
return memregion_Other << 27;
|
|
|
|
}
|
|
|
|
case memregion_BIOS9DSi:
|
|
|
|
case memregion_BIOS7DSi:
|
|
|
|
return (addr & 0xFFFF) | (region << 27);
|
|
|
|
default:
|
|
|
|
assert(false && "This should only be needed for regions which can contain code");
|
|
|
|
return memregion_Other << 27;
|
|
|
|
}
|
2020-06-30 21:50:41 +00:00
|
|
|
}
|
|
|
|
|
2020-06-14 19:04:25 +00:00
|
|
|
int ClassifyAddress9(u32 addr)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
if (addr < NDS::ARM9->ITCMSize)
|
|
|
|
{
|
|
|
|
return memregion_ITCM;
|
|
|
|
}
|
|
|
|
else if (addr >= NDS::ARM9->DTCMBase && addr < (NDS::ARM9->DTCMBase + NDS::ARM9->DTCMSize))
|
|
|
|
{
|
|
|
|
return memregion_DTCM;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
if (NDS::ConsoleType == 1 && addr >= 0xFFFF0000 && !(DSi::SCFG_BIOS & (1<<1)))
|
|
|
|
{
|
|
|
|
if ((addr >= 0xFFFF8000) && (DSi::SCFG_BIOS & (1<<0)))
|
|
|
|
return memregion_Other;
|
|
|
|
|
|
|
|
return memregion_BIOS9DSi;
|
|
|
|
}
|
|
|
|
else if ((addr & 0xFFFFF000) == 0xFFFF0000)
|
|
|
|
{
|
|
|
|
return memregion_BIOS9;
|
|
|
|
}
|
|
|
|
|
|
|
|
switch (addr & 0xFF000000)
|
|
|
|
{
|
|
|
|
case 0x02000000:
|
|
|
|
return memregion_MainRAM;
|
|
|
|
case 0x03000000:
|
|
|
|
if (NDS::ConsoleType == 1)
|
|
|
|
{
|
|
|
|
if (addr >= DSi::NWRAMStart[0][0] && addr < DSi::NWRAMEnd[0][0])
|
|
|
|
return memregion_NewSharedWRAM_A;
|
|
|
|
if (addr >= DSi::NWRAMStart[0][1] && addr < DSi::NWRAMEnd[0][1])
|
|
|
|
return memregion_NewSharedWRAM_B;
|
|
|
|
if (addr >= DSi::NWRAMStart[0][2] && addr < DSi::NWRAMEnd[0][2])
|
|
|
|
return memregion_NewSharedWRAM_C;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (NDS::SWRAM_ARM9.Mem)
|
|
|
|
return memregion_SharedWRAM;
|
|
|
|
return memregion_Other;
|
|
|
|
case 0x04000000:
|
|
|
|
return memregion_IO9;
|
|
|
|
case 0x06000000:
|
|
|
|
return memregion_VRAM;
|
|
|
|
default:
|
|
|
|
return memregion_Other;
|
|
|
|
}
|
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
int ClassifyAddress7(u32 addr)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
if (NDS::ConsoleType == 1 && addr < 0x00010000 && !(DSi::SCFG_BIOS & (1<<9)))
|
2020-06-30 21:50:41 +00:00
|
|
|
{
|
|
|
|
if (addr >= 0x00008000 && DSi::SCFG_BIOS & (1<<8))
|
|
|
|
return memregion_Other;
|
|
|
|
|
|
|
|
return memregion_BIOS7DSi;
|
|
|
|
}
|
2020-07-23 15:43:25 +00:00
|
|
|
else if (addr < 0x00004000)
|
|
|
|
{
|
|
|
|
return memregion_BIOS7;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
switch (addr & 0xFF800000)
|
|
|
|
{
|
|
|
|
case 0x02000000:
|
|
|
|
case 0x02800000:
|
|
|
|
return memregion_MainRAM;
|
|
|
|
case 0x03000000:
|
|
|
|
if (NDS::ConsoleType == 1)
|
|
|
|
{
|
|
|
|
if (addr >= DSi::NWRAMStart[1][0] && addr < DSi::NWRAMEnd[1][0])
|
|
|
|
return memregion_NewSharedWRAM_A;
|
|
|
|
if (addr >= DSi::NWRAMStart[1][1] && addr < DSi::NWRAMEnd[1][1])
|
|
|
|
return memregion_NewSharedWRAM_B;
|
|
|
|
if (addr >= DSi::NWRAMStart[1][2] && addr < DSi::NWRAMEnd[1][2])
|
|
|
|
return memregion_NewSharedWRAM_C;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (NDS::SWRAM_ARM7.Mem)
|
|
|
|
return memregion_SharedWRAM;
|
|
|
|
return memregion_WRAM7;
|
|
|
|
case 0x03800000:
|
|
|
|
return memregion_WRAM7;
|
|
|
|
case 0x04000000:
|
|
|
|
return memregion_IO7;
|
|
|
|
case 0x04800000:
|
|
|
|
return memregion_Wifi;
|
|
|
|
case 0x06000000:
|
|
|
|
case 0x06800000:
|
|
|
|
return memregion_VWRAM;
|
2020-11-09 19:43:31 +00:00
|
|
|
|
|
|
|
default:
|
|
|
|
return memregion_Other;
|
2020-07-23 15:43:25 +00:00
|
|
|
}
|
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void WifiWrite32(u32 addr, u32 val)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
Wifi::Write(addr, val & 0xFFFF);
|
|
|
|
Wifi::Write(addr + 2, val >> 16);
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
u32 WifiRead32(u32 addr)
|
|
|
|
{
|
2020-08-14 21:38:47 +00:00
|
|
|
return (u32)Wifi::Read(addr) | ((u32)Wifi::Read(addr + 2) << 16);
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
template <typename T>
|
|
|
|
void VRAMWrite(u32 addr, T val)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
switch (addr & 0x00E00000)
|
|
|
|
{
|
|
|
|
case 0x00000000: GPU::WriteVRAM_ABG<T>(addr, val); return;
|
|
|
|
case 0x00200000: GPU::WriteVRAM_BBG<T>(addr, val); return;
|
|
|
|
case 0x00400000: GPU::WriteVRAM_AOBJ<T>(addr, val); return;
|
|
|
|
case 0x00600000: GPU::WriteVRAM_BOBJ<T>(addr, val); return;
|
|
|
|
default: GPU::WriteVRAM_LCDC<T>(addr, val); return;
|
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
template <typename T>
|
|
|
|
T VRAMRead(u32 addr)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
switch (addr & 0x00E00000)
|
|
|
|
{
|
|
|
|
case 0x00000000: return GPU::ReadVRAM_ABG<T>(addr);
|
|
|
|
case 0x00200000: return GPU::ReadVRAM_BBG<T>(addr);
|
|
|
|
case 0x00400000: return GPU::ReadVRAM_AOBJ<T>(addr);
|
|
|
|
case 0x00600000: return GPU::ReadVRAM_BOBJ<T>(addr);
|
|
|
|
default: return GPU::ReadVRAM_LCDC<T>(addr);
|
|
|
|
}
|
2020-06-14 19:04:25 +00:00
|
|
|
}
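
// GetFuncForAddr (below) picks the handler the JIT calls on the slow path:
// for MMIO, Wifi and VRAM accesses it returns the matching read/write
// function (size | store encodes the access, e.g. 16 = 16-bit load,
// 17 = 16-bit store), and NULL means the generic memory access routine
// has to be used instead.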
|
|
|
|
|
|
|
|
void* GetFuncForAddr(ARM* cpu, u32 addr, bool store, int size)
|
|
|
|
{
|
2020-07-23 15:43:25 +00:00
|
|
|
if (cpu->Num == 0)
|
|
|
|
{
|
|
|
|
switch (addr & 0xFF000000)
|
|
|
|
{
|
|
|
|
case 0x04000000:
|
|
|
|
if (!store && size == 32 && addr == 0x04100010 && NDS::ExMemCnt[0] & (1<<11))
|
|
|
|
return (void*)NDSCart::ReadROMData;
|
|
|
|
|
|
|
|
/*
unfortunately we can't map GPU2D this way
since it's hidden inside an object

though GPU3D registers are accessed much more intensively
*/
|
|
|
|
if (addr >= 0x04000320 && addr < 0x040006A4)
|
|
|
|
{
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 8: return (void*)GPU3D::Read8;
|
|
|
|
case 9: return (void*)GPU3D::Write8;
|
|
|
|
case 16: return (void*)GPU3D::Read16;
|
|
|
|
case 17: return (void*)GPU3D::Write16;
|
|
|
|
case 32: return (void*)GPU3D::Read32;
|
|
|
|
case 33: return (void*)GPU3D::Write32;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (NDS::ConsoleType == 0)
|
|
|
|
{
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 8: return (void*)NDS::ARM9IORead8;
|
|
|
|
case 9: return (void*)NDS::ARM9IOWrite8;
|
|
|
|
case 16: return (void*)NDS::ARM9IORead16;
|
|
|
|
case 17: return (void*)NDS::ARM9IOWrite16;
|
|
|
|
case 32: return (void*)NDS::ARM9IORead32;
|
|
|
|
case 33: return (void*)NDS::ARM9IOWrite32;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 8: return (void*)DSi::ARM9IORead8;
|
|
|
|
case 9: return (void*)DSi::ARM9IOWrite8;
|
|
|
|
case 16: return (void*)DSi::ARM9IORead16;
|
|
|
|
case 17: return (void*)DSi::ARM9IOWrite16;
|
|
|
|
case 32: return (void*)DSi::ARM9IORead32;
|
|
|
|
case 33: return (void*)DSi::ARM9IOWrite32;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
case 0x06000000:
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 8: return (void*)VRAMRead<u8>;
|
|
|
|
case 9: return NULL;
|
|
|
|
case 16: return (void*)VRAMRead<u16>;
|
|
|
|
case 17: return (void*)VRAMWrite<u16>;
|
|
|
|
case 32: return (void*)VRAMRead<u32>;
|
|
|
|
case 33: return (void*)VRAMWrite<u32>;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
switch (addr & 0xFF800000)
|
|
|
|
{
|
|
|
|
case 0x04000000:
|
|
|
|
if (addr >= 0x04000400 && addr < 0x04000520)
|
|
|
|
{
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 8: return (void*)SPU::Read8;
|
|
|
|
case 9: return (void*)SPU::Write8;
|
|
|
|
case 16: return (void*)SPU::Read16;
|
|
|
|
case 17: return (void*)SPU::Write16;
|
|
|
|
case 32: return (void*)SPU::Read32;
|
|
|
|
case 33: return (void*)SPU::Write32;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (NDS::ConsoleType == 0)
|
|
|
|
{
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 8: return (void*)NDS::ARM7IORead8;
|
|
|
|
case 9: return (void*)NDS::ARM7IOWrite8;
|
|
|
|
case 16: return (void*)NDS::ARM7IORead16;
|
|
|
|
case 17: return (void*)NDS::ARM7IOWrite16;
|
|
|
|
case 32: return (void*)NDS::ARM7IORead32;
|
|
|
|
case 33: return (void*)NDS::ARM7IOWrite32;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 8: return (void*)DSi::ARM7IORead8;
|
|
|
|
case 9: return (void*)DSi::ARM7IOWrite8;
|
|
|
|
case 16: return (void*)DSi::ARM7IORead16;
|
|
|
|
case 17: return (void*)DSi::ARM7IOWrite16;
|
|
|
|
case 32: return (void*)DSi::ARM7IORead32;
|
|
|
|
case 33: return (void*)DSi::ARM7IOWrite32;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
case 0x04800000:
|
|
|
|
if (addr < 0x04810000 && size >= 16)
|
|
|
|
{
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 16: return (void*)Wifi::Read;
|
|
|
|
case 17: return (void*)Wifi::Write;
|
|
|
|
case 32: return (void*)WifiRead32;
|
|
|
|
case 33: return (void*)WifiWrite32;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
case 0x06000000:
|
|
|
|
case 0x06800000:
|
|
|
|
switch (size | store)
|
|
|
|
{
|
|
|
|
case 8: return (void*)GPU::ReadVRAM_ARM7<u8>;
|
|
|
|
case 9: return (void*)GPU::WriteVRAM_ARM7<u8>;
|
|
|
|
case 16: return (void*)GPU::ReadVRAM_ARM7<u16>;
|
|
|
|
case 17: return (void*)GPU::WriteVRAM_ARM7<u16>;
|
|
|
|
case 32: return (void*)GPU::ReadVRAM_ARM7<u32>;
|
|
|
|
case 33: return (void*)GPU::WriteVRAM_ARM7<u32>;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return NULL;
|
2020-06-14 19:04:25 +00:00
|
|
|
}
|
|
|
|
|
2020-11-29 16:11:33 +00:00
|
|
|
}
|