mmu: max alignment requirement is 32-bit
a 64-bit access only needs to be 32-bit aligned. use enum for mmu errors
parent afcb3b7ad8
commit bf79183bd6
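Note on the alignment change: the commit caps the required alignment at 32 bits, so an 8-byte access is only checked for 4-byte alignment while 16- and 32-bit accesses keep their usual rules. A minimal standalone sketch of the resulting check (illustration only, with arbitrary example addresses, not the emulator's code):

    #include <algorithm>
    #include <cstdint>

    // Alignment mask capped at 4 bytes: a 64-bit access only needs a
    // 4-byte-aligned address (standalone illustration of the commit's check).
    template <typename T>
    bool misaligned(uint32_t addr)
    {
        return (addr & (std::min(sizeof(T), sizeof(uint32_t)) - 1)) != 0;
    }

    // misaligned<uint64_t>(0x8C000004) -> false: 4-byte alignment is enough
    // misaligned<uint64_t>(0x8C000002) -> true:  still an address error
    // misaligned<uint16_t>(0x8C000001) -> true:  the 16-bit rule is unchanged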
@@ -96,8 +96,8 @@ DynarecCodeEntryPtr DYNACALL bm_GetCodeByVAddr(u32 addr)
 	}

 	u32 paddr;
-	u32 rv = mmu_instruction_translation(addr, paddr);
-	if (rv != MMU_ERROR_NONE)
+	MmuError rv = mmu_instruction_translation(addr, paddr);
+	if (rv != MmuError::NONE)
 	{
 		DoMMUException(addr, rv, MMU_TT_IREAD);
 		mmu_instruction_translation(next_pc, paddr);

@@ -145,8 +145,8 @@ bool RuntimeBlockInfo::Setup(u32 rpc,fpscr_t rfpu_cfg)
 	}
 	else if (mmu_enabled())
 	{
-		u32 rv = mmu_instruction_translation(vaddr, addr);
-		if (rv != MMU_ERROR_NONE)
+		MmuError rv = mmu_instruction_translation(vaddr, addr);
+		if (rv != MmuError::NONE)
 		{
 			DoMMUException(vaddr, rv, MMU_TT_IREAD);
 			return false;

@@ -463,10 +463,10 @@ static bool translateAddress(u32 addr, int size, u32 access, u32& outAddr, RuntimeBlockInfo* block)
 		return false;

 	u32 paddr;
-	u32 rv = access == MMU_TT_DREAD ?
+	MmuError rv = access == MMU_TT_DREAD ?
 			mmu_data_translation<MMU_TT_DREAD>(addr, paddr)
 			: mmu_data_translation<MMU_TT_DWRITE>(addr, paddr);
-	if (rv != MMU_ERROR_NONE)
+	if (rv != MmuError::NONE)
 		return false;

 	addr = paddr;

@@ -478,18 +478,20 @@ static bool translateAddress(u32 addr, int size, u32 access, u32& outAddr, RuntimeBlockInfo* block)

 bool rdv_readMemImmediate(u32 addr, int size, void*& ptr, bool& isRam, u32& physAddr, RuntimeBlockInfo* block)
 {
+	size = std::min(size, 4);
 	if (!translateAddress(addr, size, MMU_TT_DREAD, physAddr, block))
 		return false;
-	ptr = addrspace::readConst(physAddr, isRam, size > 4 ? 4 : size);
+	ptr = addrspace::readConst(physAddr, isRam, size);

 	return true;
 }

 bool rdv_writeMemImmediate(u32 addr, int size, void*& ptr, bool& isRam, u32& physAddr, RuntimeBlockInfo* block)
 {
+	size = std::min(size, 4);
 	if (!translateAddress(addr, size, MMU_TT_DWRITE, physAddr, block))
 		return false;
-	ptr = addrspace::writeConst(physAddr, isRam, size > 4 ? 4 : size);
+	ptr = addrspace::writeConst(physAddr, isRam, size);

 	return true;
 }
@@ -203,7 +203,7 @@ void ITLB_Sync(u32 entry)
 }

 //Do a full lookup on the UTLB entry's
-u32 mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
+MmuError mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
 {
 	if (lru_entry != NULL)
 	{

@@ -218,7 +218,7 @@ u32 mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
 			if (tlb_entry_ret != nullptr)
 				*tlb_entry_ret = lru_entry;

-			return MMU_ERROR_NONE;
+			return MmuError::NONE;
 		}
 	}
 	const TLB_Entry *localEntry;

@@ -233,7 +233,7 @@ u32 mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
 		lru_mask = mask;
 		lru_address = ((*tlb_entry_ret)->Address.VPN << 10);

-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

 #ifdef USE_WINCE_HACK

@@ -256,50 +256,50 @@ u32 mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)

 		cache_entry(entry);

-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}
 #endif

-	return MMU_ERROR_TLB_MISS;
+	return MmuError::TLB_MISS;
 }

 template<u32 translation_type>
-u32 mmu_full_SQ(u32 va, u32& rv)
+MmuError mmu_full_SQ(u32 va, u32& rv)
 {
-	u32 lookup = mmu_full_lookup(va, nullptr, rv);
+	MmuError lookup = mmu_full_lookup(va, nullptr, rv);

-	if (lookup != MMU_ERROR_NONE)
+	if (lookup != MmuError::NONE)
 		return lookup;

 	rv &= ~31;//lower 5 bits are forced to 0

-	return MMU_ERROR_NONE;
+	return MmuError::NONE;
 }
-template u32 mmu_full_SQ<MMU_TT_DREAD>(u32 va, u32& rv);
-template u32 mmu_full_SQ<MMU_TT_DWRITE>(u32 va, u32& rv);
+template MmuError mmu_full_SQ<MMU_TT_DREAD>(u32 va, u32& rv);
+template MmuError mmu_full_SQ<MMU_TT_DWRITE>(u32 va, u32& rv);

 template<u32 translation_type>
-u32 mmu_data_translation(u32 va, u32& rv)
+MmuError mmu_data_translation(u32 va, u32& rv)
 {
 	if (fast_reg_lut[va >> 29] != 0)
 	{
 		rv = va;
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

 	if ((va & 0xFC000000) == 0x7C000000)
 	{
 		// On-chip RAM area isn't translated
 		rv = va;
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

-	u32 lookup = mmu_full_lookup(va, nullptr, rv);
-	if (lookup == MMU_ERROR_NONE && (rv & 0x1C000000) == 0x1C000000)
+	MmuError lookup = mmu_full_lookup(va, nullptr, rv);
+	if (lookup == MmuError::NONE && (rv & 0x1C000000) == 0x1C000000)
 		// map 1C000000-1FFFFFFF to P4 memory-mapped registers
 		rv |= 0xF0000000;
 #ifdef TRACE_WINCE_SYSCALLS
-	if (unresolved_unicode_string != 0 && lookup == MMU_ERROR_NONE)
+	if (unresolved_unicode_string != 0 && lookup == MmuError::NONE)
 	{
 		if (va == unresolved_unicode_string)
 		{

@@ -311,8 +311,8 @@ u32 mmu_data_translation(u32 va, u32& rv)

 	return lookup;
 }
-template u32 mmu_data_translation<MMU_TT_DREAD>(u32 va, u32& rv);
-template u32 mmu_data_translation<MMU_TT_DWRITE>(u32 va, u32& rv);
+template MmuError mmu_data_translation<MMU_TT_DREAD>(u32 va, u32& rv);
+template MmuError mmu_data_translation<MMU_TT_DWRITE>(u32 va, u32& rv);

 void mmu_flush_table()
 {
@@ -83,7 +83,7 @@ void ITLB_Sync(u32 entry)
 #endif

 template<typename F>
-static void mmuException(u32 mmu_error, u32 address, u32 am, F raise)
+static void mmuException(MmuError mmu_error, u32 address, u32 am, F raise)
 {
 	printf_mmu("MMU exception -> pc = 0x%X : ", next_pc);
 	CCN_TEA = address;

@@ -91,26 +91,26 @@ static void mmuException(u32 mmu_error, u32 address, u32 am, F raise)

 	switch (mmu_error)
 	{
-	case MMU_ERROR_NONE:
-		die("Error: mmu_error == MMU_ERROR_NONE)");
+	case MmuError::NONE:
+		die("Error: mmu_error == MmuError::NONE)");
 		return;

-	case MMU_ERROR_TLB_MISS:
-		printf_mmu("MMU_ERROR_UTLB_MISS 0x%X, handled", address);
+	case MmuError::TLB_MISS:
+		printf_mmu("MmuError::UTLB_MISS 0x%X, handled", address);
 		if (am == MMU_TT_DWRITE)
 			raise(Sh4Ex_TlbMissWrite);
 		else
 			raise(Sh4Ex_TlbMissRead);
 		return;

-	case MMU_ERROR_TLB_MHIT:
-		ERROR_LOG(SH4, "MMU_ERROR_TLB_MHIT @ 0x%X", address);
+	case MmuError::TLB_MHIT:
+		ERROR_LOG(SH4, "MmuError::TLB_MHIT @ 0x%X", address);
 		raise(Sh4Ex_TlbMultiHit);
 		break;

 	//Mem is read/write protected (depends on translation type)
-	case MMU_ERROR_PROTECTED:
-		printf_mmu("MMU_ERROR_PROTECTED 0x%X, handled", address);
+	case MmuError::PROTECTED:
+		printf_mmu("MmuError::PROTECTED 0x%X, handled", address);
 		if (am == MMU_TT_DWRITE)
 			raise(Sh4Ex_TlbProtViolWrite);
 		else

@@ -118,22 +118,22 @@ static void mmuException(u32 mmu_error, u32 address, u32 am, F raise)
 		return;

 	//Mem is write protected , firstwrite
-	case MMU_ERROR_FIRSTWRITE:
-		printf_mmu("MMU_ERROR_FIRSTWRITE");
+	case MmuError::FIRSTWRITE:
+		printf_mmu("MmuError::FIRSTWRITE");
 		verify(am == MMU_TT_DWRITE);
 		raise(Sh4Ex_TlbInitPageWrite);
 		return;

 	//data read/write misaligned
-	case MMU_ERROR_BADADDR:
+	case MmuError::BADADDR:
 		if (am == MMU_TT_DWRITE) //WADDERR - Write Data Address Error
 		{
-			printf_mmu("MMU_ERROR_BADADDR(dw) 0x%X", address);
+			printf_mmu("MmuError::BADADDR(dw) 0x%X", address);
 			raise(Sh4Ex_AddressErrorWrite);
 		}
 		else if (am == MMU_TT_DREAD) //RADDERR - Read Data Address Error
 		{
-			printf_mmu("MMU_ERROR_BADADDR(dr) 0x%X", address);
+			printf_mmu("MmuError::BADADDR(dr) 0x%X", address);
 			raise(Sh4Ex_AddressErrorRead);
 		}
 		else //IADDERR - Instruction Address Error

@@ -141,7 +141,7 @@ static void mmuException(u32 mmu_error, u32 address, u32 am, F raise)
 #ifdef TRACE_WINCE_SYSCALLS
 			if (!print_wince_syscall(address))
 #endif
-				printf_mmu("MMU_ERROR_BADADDR(i) 0x%X", address);
+				printf_mmu("MmuError::BADADDR(i) 0x%X", address);
 			raise(Sh4Ex_AddressErrorRead);
 		}
 		return;

@@ -151,7 +151,7 @@ static void mmuException(u32 mmu_error, u32 address, u32 am, F raise)
 	}
 }

-[[noreturn]] void mmu_raise_exception(u32 mmu_error, u32 address, u32 am)
+[[noreturn]] void mmu_raise_exception(MmuError mmu_error, u32 address, u32 am)
 {
 	mmuException(mmu_error, address, am, [](Sh4ExceptionCode event) {
 		debugger::debugTrap(event); // FIXME CCN_TEA and CCN_PTEH have been updated already

@@ -162,7 +162,7 @@ static void mmuException(u32 mmu_error, u32 address, u32 am, F raise)
 }


-void DoMMUException(u32 address, u32 mmu_error, u32 access_type)
+void DoMMUException(u32 address, MmuError mmu_error, u32 access_type)
 {
 	mmuException(mmu_error, address, access_type, [](Sh4ExceptionCode event) {
 		Do_Exception(next_pc, event);
@@ -190,7 +190,7 @@ bool mmu_match(u32 va, CCN_PTEH_type Address, CCN_PTEL_type Data)

 #ifndef FAST_MMU
 //Do a full lookup on the UTLB entry's
-u32 mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
+MmuError mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
 {
 	CCN_MMUCR.URC++;
 	if (CCN_MMUCR.URB == CCN_MMUCR.URC)

@@ -202,7 +202,7 @@ u32 mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
 		if (mmu_match(va, tlb_entry.Address, tlb_entry.Data))
 		{
 			if (*tlb_entry_ret != nullptr)
-				return MMU_ERROR_TLB_MHIT;
+				return MmuError::TLB_MHIT;
 			*tlb_entry_ret = &tlb_entry;
 			u32 sz = tlb_entry.Data.SZ1 * 2 + tlb_entry.Data.SZ0;
 			u32 mask = mmu_mask[sz];

@@ -212,9 +212,9 @@ u32 mmu_full_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
 	}

 	if (*tlb_entry_ret == nullptr)
-		return MMU_ERROR_TLB_MISS;
+		return MmuError::TLB_MISS;
 	else
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 }

 //Simple QACR translation for mmu (when AT is off)
@@ -228,87 +228,87 @@ static u32 mmu_QACR_SQ(u32 va)
 }

 template<u32 translation_type>
-u32 mmu_full_SQ(u32 va, u32& rv)
+MmuError mmu_full_SQ(u32 va, u32& rv)
 {

 	if ((va & 3) || (CCN_MMUCR.SQMD == 1 && sr.MD == 0))
 		//here, or after ?
-		return MMU_ERROR_BADADDR;
+		return MmuError::BADADDR;

 	if (CCN_MMUCR.AT)
 	{
 		//Address=Dest&0xFFFFFFE0;

 		const TLB_Entry *entry;
-		u32 lookup = mmu_full_lookup(va, &entry, rv);
+		MmuError lookup = mmu_full_lookup(va, &entry, rv);

 		rv &= ~31;//lower 5 bits are forced to 0

-		if (lookup != MMU_ERROR_NONE)
+		if (lookup != MmuError::NONE)
 			return lookup;

 		u32 md = entry->Data.PR >> 1;

 		//Priv mode protection
 		if (md == 0 && sr.MD == 0)
-			return MMU_ERROR_PROTECTED;
+			return MmuError::PROTECTED;

 		//Write Protection (Lock or FW)
 		if (translation_type == MMU_TT_DWRITE)
 		{
 			if ((entry->Data.PR & 1) == 0)
-				return MMU_ERROR_PROTECTED;
+				return MmuError::PROTECTED;
 			else if (entry->Data.D == 0)
-				return MMU_ERROR_FIRSTWRITE;
+				return MmuError::FIRSTWRITE;
 		}
 	}
 	else
 	{
 		rv = mmu_QACR_SQ(va);
 	}
-	return MMU_ERROR_NONE;
+	return MmuError::NONE;
 }
-template u32 mmu_full_SQ<MMU_TT_DREAD>(u32 va, u32& rv);
-template u32 mmu_full_SQ<MMU_TT_DWRITE>(u32 va, u32& rv);
+template MmuError mmu_full_SQ<MMU_TT_DREAD>(u32 va, u32& rv);
+template MmuError mmu_full_SQ<MMU_TT_DWRITE>(u32 va, u32& rv);

 template<u32 translation_type>
-u32 mmu_data_translation(u32 va, u32& rv)
+MmuError mmu_data_translation(u32 va, u32& rv)
 {
 	if (translation_type == MMU_TT_DWRITE)
 	{
 		if ((va & 0xFC000000) == 0xE0000000)
 		{
-			u32 lookup = mmu_full_SQ<MMU_TT_DWRITE>(va, rv);
-			if (lookup != MMU_ERROR_NONE)
+			MmuError lookup = mmu_full_SQ<MMU_TT_DWRITE>(va, rv);
+			if (lookup != MmuError::NONE)
 				return lookup;

 			rv = va; //SQ writes are not translated, only write backs are.
-			return MMU_ERROR_NONE;
+			return MmuError::NONE;
 		}
 	}

 	if (sr.MD == 0 && (va & 0x80000000) != 0)
 		//if on kernel, and not SQ addr -> error
-		return MMU_ERROR_BADADDR;
+		return MmuError::BADADDR;

 	if ((va & 0xFC000000) == 0x7C000000)
 	{
 		// 7C000000 to 7FFFFFFF in P0/U0 not translated
 		rv = va;
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

 	if (fast_reg_lut[va >> 29] != 0)
 	{
 		// P1, P2 and P4 aren't translated
 		rv = va;
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

 	const TLB_Entry *entry;
-	u32 lookup = mmu_full_lookup(va, &entry, rv);
+	MmuError lookup = mmu_full_lookup(va, &entry, rv);

-	if (lookup != MMU_ERROR_NONE)
+	if (lookup != MmuError::NONE)
 		return lookup;

 #ifdef TRACE_WINCE_SYSCALLS
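Aside on the `rv &= ~31` above: store-queue writebacks move 32-byte blocks, so the translated target address always has its low 5 bits forced to zero, as the comment in `mmu_full_SQ` says. A quick standalone illustration:

    #include <cstdint>

    // Store-queue writebacks operate on 32-byte blocks; the translated
    // target address is truncated to a 32-byte boundary (illustration only).
    uint32_t sqTargetAddress(uint32_t translated)
    {
        return translated & ~31u;
    }
    // sqTargetAddress(0x0C00123F) == 0x0C001220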
@@ -327,7 +327,7 @@ u32 mmu_data_translation(u32 va, u32& rv)
 	//0X & User mode-> protection violation
 	//Priv mode protection
 	if (md == 0 && sr.MD == 0)
-		return MMU_ERROR_PROTECTED;
+		return MmuError::PROTECTED;

 	//X0 -> read olny
 	//X1 -> read/write , can be FW

@@ -336,39 +336,39 @@ u32 mmu_data_translation(u32 va, u32& rv)
 	if (translation_type == MMU_TT_DWRITE)
 	{
 		if ((entry->Data.PR & 1) == 0)
-			return MMU_ERROR_PROTECTED;
+			return MmuError::PROTECTED;
 		else if (entry->Data.D == 0)
-			return MMU_ERROR_FIRSTWRITE;
+			return MmuError::FIRSTWRITE;
 	}
 	if ((rv & 0x1C000000) == 0x1C000000)
 		// map 1C000000-1FFFFFFF to P4 memory-mapped registers
 		rv |= 0xF0000000;

-	return MMU_ERROR_NONE;
+	return MmuError::NONE;
 }
-template u32 mmu_data_translation<MMU_TT_DREAD>(u32 va, u32& rv);
-template u32 mmu_data_translation<MMU_TT_DWRITE>(u32 va, u32& rv);
+template MmuError mmu_data_translation<MMU_TT_DREAD>(u32 va, u32& rv);
+template MmuError mmu_data_translation<MMU_TT_DWRITE>(u32 va, u32& rv);

-u32 mmu_instruction_translation(u32 va, u32& rv)
+MmuError mmu_instruction_translation(u32 va, u32& rv)
 {
 	if (sr.MD == 0 && (va & 0x80000000) != 0)
 		// User mode on kernel address
-		return MMU_ERROR_BADADDR;
+		return MmuError::BADADDR;

 	if ((va >> 29) == 7)
 		// P4 not executable
-		return MMU_ERROR_BADADDR;
+		return MmuError::BADADDR;

 	if (fast_reg_lut[va >> 29] != 0)
 	{
 		// P1 and P2 aren't translated
 		rv = va;
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

 	const TLB_Entry *entry;
-	u32 lookup = mmu_instruction_lookup(va, &entry, rv);
-	if (lookup != MMU_ERROR_NONE)
+	MmuError lookup = mmu_instruction_lookup(va, &entry, rv);
+	if (lookup != MmuError::NONE)
 		return lookup;

 	u32 md = entry->Data.PR >> 1;

@@ -376,13 +376,13 @@ u32 mmu_instruction_translation(u32 va, u32& rv)
 	//0X & User mode-> protection violation
 	//Priv mode protection
 	if (md == 0 && sr.MD == 0)
-		return MMU_ERROR_PROTECTED;
+		return MmuError::PROTECTED;

-	return MMU_ERROR_NONE;
+	return MmuError::NONE;
 }
 #endif

-u32 mmu_instruction_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
+MmuError mmu_instruction_lookup(u32 va, const TLB_Entry** tlb_entry_ret, u32& rv)
 {
 	bool mmach = false;
 retry_ITLB_Match:
@@ -401,7 +401,7 @@ retry_ITLB_Match:
 		if (!needAsidMatch || entry.Address.ASID == CCN_PTEH.ASID)
 		{
 			if (*tlb_entry_ret != nullptr)
-				return MMU_ERROR_TLB_MHIT;
+				return MmuError::TLB_MHIT;
 			*tlb_entry_ret = &entry;
 			//VPN->PPN | low bits
 			rv = ((entry.Data.PPN << 10) & mask) | (va & ~mask);

@@ -413,8 +413,8 @@ retry_ITLB_Match:
 	{
 		verify(mmach == false);
 		const TLB_Entry *tlb_entry;
-		u32 lookup = mmu_full_lookup(va, &tlb_entry, rv);
-		if (lookup != MMU_ERROR_NONE)
+		MmuError lookup = mmu_full_lookup(va, &tlb_entry, rv);
+		if (lookup != MmuError::NONE)
 			return lookup;

 		u32 replace_index = ITLB_LRU_USE[CCN_MMUCR.LRUI];

@@ -428,7 +428,7 @@ retry_ITLB_Match:
 	CCN_MMUCR.LRUI &= ITLB_LRU_AND[*tlb_entry_ret - ITLB];
 	CCN_MMUCR.LRUI |= ITLB_LRU_OR[*tlb_entry_ret - ITLB];

-	return MMU_ERROR_NONE;
+	return MmuError::NONE;
 }

 void mmu_set_state()
@@ -505,12 +505,12 @@ void mmu_flush_table()
 template<typename T>
 T DYNACALL mmu_ReadMem(u32 adr)
 {
-	if (adr & (sizeof(T) - 1))
+	if (adr & (std::min((int)sizeof(T), 4) - 1))
 		// Unaligned
-		mmu_raise_exception(MMU_ERROR_BADADDR, adr, MMU_TT_DREAD);
+		mmu_raise_exception(MmuError::BADADDR, adr, MMU_TT_DREAD);
 	u32 addr;
-	u32 rv = mmu_data_translation<MMU_TT_DREAD>(adr, addr);
-	if (rv != MMU_ERROR_NONE)
+	MmuError rv = mmu_data_translation<MMU_TT_DREAD>(adr, addr);
+	if (rv != MmuError::NONE)
 		mmu_raise_exception(rv, adr, MMU_TT_DREAD);
 	return addrspace::readt<T>(addr);
 }

@@ -523,10 +523,10 @@ u16 DYNACALL mmu_IReadMem16(u32 vaddr)
 {
 	if (vaddr & (sizeof(u16) - 1))
 		// Unaligned
-		mmu_raise_exception(MMU_ERROR_BADADDR, vaddr, MMU_TT_DREAD);
+		mmu_raise_exception(MmuError::BADADDR, vaddr, MMU_TT_IREAD);
 	u32 addr;
-	u32 rv = mmu_instruction_translation(vaddr, addr);
-	if (rv != MMU_ERROR_NONE)
+	MmuError rv = mmu_instruction_translation(vaddr, addr);
+	if (rv != MmuError::NONE)
 		mmu_raise_exception(rv, vaddr, MMU_TT_IREAD);
 	return addrspace::read16(addr);
 }

@@ -534,12 +534,12 @@ u16 DYNACALL mmu_IReadMem16(u32 vaddr)
 template<typename T>
 void DYNACALL mmu_WriteMem(u32 adr, T data)
 {
-	if (adr & (sizeof(T) - 1))
+	if (adr & (std::min((int)sizeof(T), 4) - 1))
 		// Unaligned
-		mmu_raise_exception(MMU_ERROR_BADADDR, adr, MMU_TT_DWRITE);
+		mmu_raise_exception(MmuError::BADADDR, adr, MMU_TT_DWRITE);
 	u32 addr;
-	u32 rv = mmu_data_translation<MMU_TT_DWRITE>(adr, addr);
-	if (rv != MMU_ERROR_NONE)
+	MmuError rv = mmu_data_translation<MMU_TT_DWRITE>(adr, addr);
+	if (rv != MmuError::NONE)
 		mmu_raise_exception(rv, adr, MMU_TT_DWRITE);
 	addrspace::writet<T>(addr, data);
 }

@@ -560,8 +560,8 @@ void mmu_TranslateSQW(u32 adr, u32 *out)
 	else
 	{
 		u32 addr;
-		u32 tv = mmu_full_SQ<MMU_TT_DREAD>(adr, addr);
-		if (tv != MMU_ERROR_NONE)
+		MmuError tv = mmu_full_SQ<MMU_TT_DREAD>(adr, addr);
+		if (tv != MmuError::NONE)
 			mmu_raise_exception(tv, adr, MMU_TT_DREAD);

 		*out = addr;
@@ -12,19 +12,21 @@
 //Data write
 #define MMU_TT_DREAD 2

-//Return Values
-//Translation was successful
-#define MMU_ERROR_NONE 0
-//TLB miss
-#define MMU_ERROR_TLB_MISS 1
-//TLB Multihit
-#define MMU_ERROR_TLB_MHIT 2
-//Mem is read/write protected (depends on translation type)
-#define MMU_ERROR_PROTECTED 3
-//Mem is write protected , firstwrite
-#define MMU_ERROR_FIRSTWRITE 4
-//data-Opcode read/write misaligned
-#define MMU_ERROR_BADADDR 5
+enum class MmuError
+{
+	//Translation was successful
+	NONE,
+	//TLB miss
+	TLB_MISS,
+	//TLB Multihit
+	TLB_MHIT,
+	//Mem is read/write protected (depends on translation type)
+	PROTECTED,
+	//Mem is write protected , firstwrite
+	FIRSTWRITE,
+	//data-Opcode read/write misaligned
+	BADADDR
+};

 struct TLB_Entry
 {
@@ -61,25 +63,25 @@ void ITLB_Sync(u32 entry);
 bool mmu_match(u32 va, CCN_PTEH_type Address, CCN_PTEL_type Data);
 void mmu_set_state();
 void mmu_flush_table();
-[[noreturn]] void mmu_raise_exception(u32 mmu_error, u32 address, u32 am);
+[[noreturn]] void mmu_raise_exception(MmuError mmu_error, u32 address, u32 am);

 static inline bool mmu_enabled()
 {
 	return mmuOn;
 }

-u32 mmu_full_lookup(u32 va, const TLB_Entry **entry, u32& rv);
-u32 mmu_instruction_lookup(u32 va, const TLB_Entry **entry, u32& rv);
+MmuError mmu_full_lookup(u32 va, const TLB_Entry **entry, u32& rv);
+MmuError mmu_instruction_lookup(u32 va, const TLB_Entry **entry, u32& rv);
 template<u32 translation_type>
-u32 mmu_full_SQ(u32 va, u32& rv);
+MmuError mmu_full_SQ(u32 va, u32& rv);

 #ifdef FAST_MMU
-static inline u32 mmu_instruction_translation(u32 va, u32& rv)
+static inline MmuError mmu_instruction_translation(u32 va, u32& rv)
 {
 	if (fast_reg_lut[va >> 29] != 0)
 	{
 		rv = va;
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

 	return mmu_full_lookup(va, nullptr, rv);

@@ -89,13 +91,13 @@ u32 mmu_instruction_translation(u32 va, u32& rv);
 #endif

 template<u32 translation_type>
-u32 mmu_data_translation(u32 va, u32& rv);
-void DoMMUException(u32 addr, u32 mmu_error, u32 access_type);
+MmuError mmu_data_translation(u32 va, u32& rv);
+void DoMMUException(u32 addr, MmuError mmu_error, u32 access_type);

 inline static bool mmu_is_translated(u32 va, u32 size)
 {
 #ifndef FAST_MMU
-	if (va & (size - 1))
+	if (va & (std::min(size, 4u) - 1))
 		return true;
 #endif

@@ -133,13 +135,13 @@ static inline void mmuAddressLUTFlush(bool full)
 static inline u32 DYNACALL mmuDynarecLookup(u32 vaddr, u32 write, u32 pc)
 {
 	u32 paddr;
-	u32 rv;
+	MmuError rv;
 	// TODO pass access size so that alignment errors are raised
 	if (write)
 		rv = mmu_data_translation<MMU_TT_DWRITE>(vaddr, paddr);
 	else
 		rv = mmu_data_translation<MMU_TT_DREAD>(vaddr, paddr);
-	if (unlikely(rv != MMU_ERROR_NONE))
+	if (unlikely(rv != MmuError::NONE))
 	{
 		Sh4cntx.pc = pc;
 		DoMMUException(vaddr, rv, write ? MMU_TT_DWRITE : MMU_TT_DREAD);
@@ -30,7 +30,7 @@ static bool read_mem32(u32 addr, u32& data)
 {
 	u32 pa;
 	const TLB_Entry *entry;
-	if (mmu_full_lookup(addr, &entry, pa) != MMU_ERROR_NONE)
+	if (mmu_full_lookup(addr, &entry, pa) != MmuError::NONE)
 		return false;
 	data = ReadMem32_nommu(pa);
 	return true;

@@ -40,7 +40,7 @@ static bool read_mem16(u32 addr, u16& data)
 {
 	u32 pa;
 	const TLB_Entry *entry;
-	if (mmu_full_lookup(addr, &entry, pa) != MMU_ERROR_NONE)
+	if (mmu_full_lookup(addr, &entry, pa) != MmuError::NONE)
 		return false;
 	data = ReadMem16_nommu(pa);
 	return true;

@@ -50,7 +50,7 @@ static bool read_mem8(u32 addr, u8& data)
 {
 	u32 pa;
 	const TLB_Entry *entry;
-	if (mmu_full_lookup(addr, &entry, pa) != MMU_ERROR_NONE)
+	if (mmu_full_lookup(addr, &entry, pa) != MmuError::NONE)
 		return false;
 	data = ReadMem8_nommu(pa);
 	return true;
@@ -58,8 +58,8 @@ public:
 	{
 		bool cacheOn = false;
 		u32 physAddr;
-		u32 err = translateAddress(address, physAddr, cacheOn);
-		if (err != MMU_ERROR_NONE)
+		MmuError err = translateAddress(address, physAddr, cacheOn);
+		if (err != MmuError::NONE)
 			mmu_raise_exception(err, address, MMU_TT_IREAD);

 		if (!cacheOn)

@@ -130,11 +130,11 @@ public:
 		const u32 vaddr = data & ~0x3ff;
 		bool cached;
 		u32 physAddr;
-		u32 err = translateAddress(vaddr, physAddr, cached);
-		if (err == MMU_ERROR_TLB_MISS)
+		MmuError err = translateAddress(vaddr, physAddr, cached);
+		if (err == MmuError::TLB_MISS)
 			// Ignore the write
 			return;
-		if (err != MMU_ERROR_NONE)
+		if (err != MmuError::NONE)
 			mmu_raise_exception(err, vaddr, MMU_TT_IREAD);

 		u32 tag = (physAddr >> 10) & 0x7ffff;

@@ -167,11 +167,11 @@ private:
 		u8 data[32];
 	};

-	u32 translateAddress(u32 address, u32& physAddr, bool& cached)
+	MmuError translateAddress(u32 address, u32& physAddr, bool& cached)
 	{
 		// Alignment errors
 		if (address & 1)
-			return MMU_ERROR_BADADDR;
+			return MmuError::BADADDR;

 		const u32 area = address >> 29;
 		const bool userMode = sr.MD == 0;

@@ -181,13 +181,13 @@ private:
 			// kernel mem protected in user mode
 			// FIXME this makes WinCE fail
 			//if (address & 0x80000000)
-			//	return MMU_ERROR_BADADDR;
+			//	return MmuError::BADADDR;
 		}
 		else
 		{
 			// P4 not executable
 			if (area == 7)
-				return MMU_ERROR_BADADDR;
+				return MmuError::BADADDR;
 		}
 		cached = CCN_CCR.ICE == 1 && cachedArea(area);

@@ -200,9 +200,9 @@ private:
 		else
 		{
 			const TLB_Entry *entry;
-			u32 err = mmu_instruction_lookup(address, &entry, physAddr);
+			MmuError err = mmu_instruction_lookup(address, &entry, physAddr);

-			if (err != MMU_ERROR_NONE)
+			if (err != MmuError::NONE)
 				return err;

 			//0X & User mode-> protection violation

@@ -211,11 +211,11 @@ private:
 			{
 				u32 md = entry->Data.PR >> 1;
 				if (md == 0)
-					return MMU_ERROR_PROTECTED;
+					return MmuError::PROTECTED;
 			}
 			cached = cached && entry->Data.C == 1;
 		}
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

 	std::array<cache_line, 256> lines;
@@ -235,8 +235,8 @@ public:
 		u32 physAddr;
 		bool cacheOn = false;
 		bool copyBack;
-		u32 err = translateAddress<T, MMU_TT_DREAD>(address, physAddr, cacheOn, copyBack);
-		if (err != MMU_ERROR_NONE)
+		MmuError err = translateAddress<T, MMU_TT_DREAD>(address, physAddr, cacheOn, copyBack);
+		if (err != MmuError::NONE)
 			mmu_raise_exception(err, address, MMU_TT_DREAD);

 		if (!cacheOn)

@@ -264,8 +264,8 @@ public:
 		u32 physAddr = 0;
 		bool cacheOn = false;
 		bool copyBack = false;
-		u32 err = translateAddress<T, MMU_TT_DWRITE>(address, physAddr, cacheOn, copyBack);
-		if (err != MMU_ERROR_NONE)
+		MmuError err = translateAddress<T, MMU_TT_DWRITE>(address, physAddr, cacheOn, copyBack);
+		if (err != MmuError::NONE)
 			mmu_raise_exception(err, address, MMU_TT_DWRITE);

 		if (!cacheOn)

@@ -312,8 +312,8 @@ public:
 		u32 physAddr;
 		bool cached = false;
 		bool copyBack;
-		u32 err = translateAddress<u8, MMU_TT_DWRITE>(address, physAddr, cached, copyBack);
-		if (err != MMU_ERROR_NONE)
+		MmuError err = translateAddress<u8, MMU_TT_DWRITE>(address, physAddr, cached, copyBack);
+		if (err != MmuError::NONE)
 			mmu_raise_exception(err, address, MMU_TT_DWRITE);

 		if (!cached)

@@ -336,8 +336,8 @@ public:
 		u32 physAddr;
 		bool cached;
 		bool copyBack;
-		u32 err = translateAddress<u8, MMU_TT_DREAD>(address, physAddr, cached, copyBack);
-		if (err != MMU_ERROR_NONE || !cached)
+		MmuError err = translateAddress<u8, MMU_TT_DREAD>(address, physAddr, cached, copyBack);
+		if (err != MmuError::NONE || !cached)
 			// ignore address translation errors
 			return;

@@ -396,11 +396,11 @@ public:
 		u32 physAddr;
 		bool cached = false;
 		bool copyBack;
-		u32 err = translateAddress<u8, MMU_TT_DREAD>(data & ~0x3ff, physAddr, cached, copyBack);
-		if (err == MMU_ERROR_TLB_MISS)
+		MmuError err = translateAddress<u8, MMU_TT_DREAD>(data & ~0x3ff, physAddr, cached, copyBack);
+		if (err == MmuError::TLB_MISS)
 			// Ignore the write
 			return;
-		if (err != MMU_ERROR_NONE)
+		if (err != MmuError::NONE)
 			mmu_raise_exception(err, data & ~0x3ff, MMU_TT_DREAD);

 		u32 tag = (physAddr >> 10) & 0x7ffff;
@@ -491,16 +491,16 @@ private:
 	}

 	template<class T, u32 ACCESS>
-	u32 translateAddress(u32 address, u32& physAddr, bool& cached, bool& copyBack)
+	MmuError translateAddress(u32 address, u32& physAddr, bool& cached, bool& copyBack)
 	{
 		// Alignment errors
 		if (address & (sizeof(T) - 1))
-			return MMU_ERROR_BADADDR;
+			return MmuError::BADADDR;
 		if (ACCESS == MMU_TT_DWRITE && (address & 0xFC000000) == 0xE0000000)
 		{
 			// Store queues
 			u32 rv;
-			u32 lookup = mmu_full_SQ<MMU_TT_DWRITE>(address, rv);
+			MmuError lookup = mmu_full_SQ<MMU_TT_DWRITE>(address, rv);

 			physAddr = address;
 			return lookup;

@@ -510,7 +510,7 @@ private:

 		// kernel mem protected in user mode
 		if (userMode && (address & 0x80000000))
-			return MMU_ERROR_BADADDR;
+			return MmuError::BADADDR;

 		cached = CCN_CCR.OCE == 1 && cachedArea(area);
 		if (ACCESS == MMU_TT_DWRITE)

@@ -526,9 +526,9 @@ private:
 		else
 		{
 			const TLB_Entry *entry;
-			u32 lookup = mmu_full_lookup(address, &entry, physAddr);
+			MmuError lookup = mmu_full_lookup(address, &entry, physAddr);

-			if (lookup != MMU_ERROR_NONE)
+			if (lookup != MmuError::NONE)
 				return lookup;

 			//0X & User mode-> protection violation

@@ -537,16 +537,16 @@ private:
 			{
 				u32 md = entry->Data.PR >> 1;
 				if (md == 0)
-					return MMU_ERROR_PROTECTED;
+					return MmuError::PROTECTED;
 			}
 			//X0 -> read only
 			//X1 -> read/write , can be FW
 			if (ACCESS == MMU_TT_DWRITE)
 			{
 				if ((entry->Data.PR & 1) == 0)
-					return MMU_ERROR_PROTECTED;
+					return MmuError::PROTECTED;
 				if (entry->Data.D == 0)
-					return MMU_ERROR_FIRSTWRITE;
+					return MmuError::FIRSTWRITE;
 				copyBack = copyBack && entry->Data.WT == 0;
 			}
 			cached = cached && entry->Data.C == 1;

@@ -555,7 +555,7 @@ private:
 				physAddr |= 0xF0000000;

 		}
-		return MMU_ERROR_NONE;
+		return MmuError::NONE;
 	}

 	std::array<cache_line, 512> lines;
@@ -41,26 +41,26 @@ TEST_F(MmuTest, TestUntranslated)
 {
 	u32 pa;
 	// P1
-	int err = mmu_data_translation<MMU_TT_DREAD>(0x80000000, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	MmuError err = mmu_data_translation<MMU_TT_DREAD>(0x80000000, pa);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x80000000u, pa);
 	err = mmu_instruction_translation(0x80000002, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x80000002u, pa);

 	// P2
 	err = mmu_data_translation<MMU_TT_DWRITE>(0xA0001234, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0xA0001234u, pa);

 	// P4
 	err = mmu_data_translation<MMU_TT_DREAD>(0xFF0000CC, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0xFF0000CCu, pa);

 	// 7C000000 to 7FFFFFFF in P0/U0 not translated
 	err = mmu_data_translation<MMU_TT_DREAD>(0x7D000088, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x7D000088u, pa);

 	// SQ write

@@ -71,7 +71,7 @@ TEST_F(MmuTest, TestUntranslated)
 	UTLB[0].Data.D = 1;
 	UTLB_Sync(0);
 	err = mmu_data_translation<MMU_TT_DWRITE>(0xE2000004, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0xE2000004, pa);
 }

@@ -86,16 +86,16 @@ TEST_F(MmuTest, TestTranslated)
 	UTLB[0].Data.D = 1;
 	UTLB[0].Data.PPN = 0x0C000000 >> 10;
 	UTLB_Sync(0);
-	int err = mmu_data_translation<MMU_TT_DREAD>(0x02000044, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	MmuError err = mmu_data_translation<MMU_TT_DREAD>(0x02000044, pa);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x0C000044u, pa);

 	err = mmu_data_translation<MMU_TT_DWRITE>(0x02000045, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x0C000045u, pa);

 	err = mmu_instruction_translation(0x02000046, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x0C000046u, pa);

 	// ASID match

@@ -103,10 +103,10 @@ TEST_F(MmuTest, TestTranslated)
 	CCN_PTEH.ASID = 13;
 	UTLB_Sync(0);
 	err = mmu_data_translation<MMU_TT_DWRITE>(0x02000222, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x0C000222u, pa);
 	err = mmu_instruction_translation(0x02000232, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x0C000232u, pa);

 	// Shared entry

@@ -114,14 +114,14 @@ TEST_F(MmuTest, TestTranslated)
 	CCN_PTEH.ASID = 14;
 	UTLB_Sync(0);
 	err = mmu_data_translation<MMU_TT_DWRITE>(0x02000222, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0x0C000222u, pa);

 	// 1C000000-1FFFFFF mapped to P4
 	UTLB[0].Data.PPN = 0x1C000000 >> 10;
 	UTLB_Sync(0);
 	err = mmu_data_translation<MMU_TT_DWRITE>(0x02000222, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	ASSERT_EQ(0xFC000222u, pa);
 }
@@ -136,15 +136,15 @@ TEST_F(MmuTest, TestMiss)
 	UTLB[0].Data.PPN = 0x0C000000 >> 10;
 	UTLB_Sync(0);
 	// no match
-	int err = mmu_data_translation<MMU_TT_DREAD>(0x02100044, pa);
-	ASSERT_EQ(MMU_ERROR_TLB_MISS, err);
+	MmuError err = mmu_data_translation<MMU_TT_DREAD>(0x02100044, pa);
+	ASSERT_EQ(MmuError::TLB_MISS, err);

 #ifndef FAST_MMU
 	// entry not valid
 	UTLB[0].Data.V = 0;
 	UTLB_Sync(0);
 	err = mmu_data_translation<MMU_TT_DREAD>(0x02000044, pa);
-	ASSERT_EQ(MMU_ERROR_TLB_MISS, err);
+	ASSERT_EQ(MmuError::TLB_MISS, err);
 #endif

 	// asid mismatch

@@ -153,20 +153,20 @@ TEST_F(MmuTest, TestMiss)
 	CCN_PTEH.ASID = 14;
 	UTLB_Sync(0);
 	err = mmu_data_translation<MMU_TT_DREAD>(0x02000044, pa);
-	ASSERT_EQ(MMU_ERROR_TLB_MISS, err);
+	ASSERT_EQ(MmuError::TLB_MISS, err);
 }

 TEST_F(MmuTest, TestErrors)
 {
 #ifndef FAST_MMU
 	u32 pa;
-	int err;
+	MmuError err;

 	// P4 not executable
 	err = mmu_instruction_translation(0xFF00008A, pa);
-	ASSERT_EQ(MMU_ERROR_BADADDR, err);
+	ASSERT_EQ(MmuError::BADADDR, err);
 	err = mmu_instruction_translation(0xE0000004, pa);
-	ASSERT_EQ(MMU_ERROR_BADADDR, err);
+	ASSERT_EQ(MmuError::BADADDR, err);
 #endif

 	// unaligned address

@@ -186,33 +186,33 @@ TEST_F(MmuTest, TestErrors)
 	UTLB[0].Data.PPN = 0x0A000000 >> 10;
 	// no access in user mode
 	err = mmu_data_translation<MMU_TT_DREAD>(0x04000040, pa);
-	ASSERT_EQ(MMU_ERROR_PROTECTED, err);
+	ASSERT_EQ(MmuError::PROTECTED, err);
 	err = mmu_data_translation<MMU_TT_DWRITE>(0x04000040, pa);
-	ASSERT_EQ(MMU_ERROR_PROTECTED, err);
+	ASSERT_EQ(MmuError::PROTECTED, err);
 	err = mmu_instruction_translation(0x04000042, pa);
-	ASSERT_EQ(MMU_ERROR_PROTECTED, err);
+	ASSERT_EQ(MmuError::PROTECTED, err);
 	// read-only access in priv mode
 	p_sh4rcb->cntx.sr.MD = 1;
 	err = mmu_data_translation<MMU_TT_DREAD>(0x04000040, pa);
-	ASSERT_EQ(MMU_ERROR_NONE, err);
+	ASSERT_EQ(MmuError::NONE, err);
 	err = mmu_data_translation<MMU_TT_DWRITE>(0x04000040, pa);
-	ASSERT_EQ(MMU_ERROR_PROTECTED, err);
+	ASSERT_EQ(MmuError::PROTECTED, err);
 	// read-only access in user & priv mode
 	UTLB[0].Data.PR = 2;
 	p_sh4rcb->cntx.sr.MD = 0;
 	err = mmu_data_translation<MMU_TT_DWRITE>(0x04000040, pa);
-	ASSERT_EQ(MMU_ERROR_PROTECTED, err);
+	ASSERT_EQ(MmuError::PROTECTED, err);
 	p_sh4rcb->cntx.sr.MD = 1;
 	err = mmu_data_translation<MMU_TT_DWRITE>(0x04000040, pa);
-	ASSERT_EQ(MMU_ERROR_PROTECTED, err);
+	ASSERT_EQ(MmuError::PROTECTED, err);
 	UTLB[0].Data.PR = 3;

 	// kernel address in user mode
 	p_sh4rcb->cntx.sr.MD = 0;
 	err = mmu_data_translation<MMU_TT_DWRITE>(0xA4000004, pa);
-	ASSERT_EQ(MMU_ERROR_BADADDR, err);
+	ASSERT_EQ(MmuError::BADADDR, err);
 	err = mmu_instruction_translation(0xA4000006, pa);
-	ASSERT_EQ(MMU_ERROR_BADADDR, err);
+	ASSERT_EQ(MmuError::BADADDR, err);

 	// multiple hits
 	memset(ITLB, 0, sizeof(ITLB));

@@ -223,14 +223,14 @@ TEST_F(MmuTest, TestErrors)
 	UTLB[1].Data.D = 1;
 	UTLB[1].Data.PPN = 0x0C000000 >> 10;
 	err = mmu_data_translation<MMU_TT_DREAD>(0x04000040, pa);
-	ASSERT_EQ(MMU_ERROR_TLB_MHIT, err);
+	ASSERT_EQ(MmuError::TLB_MHIT, err);
 	err = mmu_instruction_translation(0x04000042, pa);
-	ASSERT_EQ(MMU_ERROR_TLB_MHIT, err);
+	ASSERT_EQ(MmuError::TLB_MHIT, err);
 	UTLB[1].Data.V = 0;

 	// first write
 	UTLB[0].Data.D = 0;
 	err = mmu_data_translation<MMU_TT_DWRITE>(0x04000224, pa);
-	ASSERT_EQ(MMU_ERROR_FIRSTWRITE, err);
+	ASSERT_EQ(MmuError::FIRSTWRITE, err);
 #endif
 }