Refactorings, fixing alignment of contiguous memory allocation

This allows Turok to get in-game again
This commit is contained in:
PatrickvL 2017-03-30 13:48:20 +02:00
parent 28721c0f7e
commit dcd647708a
3 changed files with 111 additions and 94 deletions

View File

@ -66,15 +66,26 @@ typedef uint32 xbaddr;
#define XBADDR_BITS 32
#define XBADDR_MAX UINT32_MAX
// Define virtual base and alternate virtual base of kernel.
#define KSEG0_BASE 0x80000000
// Define virtual base addresses for physical memory windows.
#define MM_SYSTEM_PHYSICAL_MAP KSEG0_BASE
#define MM_HIGHEST_PHYSICAL_PAGE 0x07FFF
#define MM_64M_PHYSICAL_PAGE 0x04000
#define MM_INSTANCE_PHYSICAL_PAGE 0x03FE0 // Chihiro arcade should use 0x07FF0
#define MM_INSTANCE_PAGE_COUNT 16
#define CONTIGUOUS_MEMORY_SIZE (64 * ONE_MB)
/*! memory size per system */
#define XBOX_MEMORY_SIZE (64 * ONE_MB)
#define CHIHIRO_MEMORY_SIZE (128 * ONE_MB)
#define XBE_IMAGE_BASE 0x00010000
/*! base addresses of various components */
#define XBOX_KERNEL_BASE 0x80010000
#define XBOX_KERNEL_BASE (MM_SYSTEM_PHYSICAL_MAP + XBE_IMAGE_BASE)
#define XBOX_NV2A_INIT_VECTOR 0xFF000008
#define XBE_IMAGE_BASE 0x00010000
// For now, virtual addresses are somewhat limited, as we use
// these solely for loading XBE sections. The largest that we
// know of, is "BLiNX: the time sweeper", which has a section

View File

@ -34,6 +34,9 @@
// *
// ******************************************************************
#include <assert.h> // For assert()
#include <malloc.h> // For _aligned_malloc()
#include "CxbxKrnl.h"
#include "Emu.h"
#include "Logging.h"
@ -55,22 +58,23 @@ void* MemoryManager::Allocate(size_t size)
{
LOG_FUNC_ONE_ARG(size);
EnterCriticalSection(&m_CriticalSection);
assert(size > 0);
void* ptr = malloc(size);
void * addr = malloc(size);
if (addr != NULL) {
TypedMemoryBlock info;
if (ptr != nullptr) {
MemoryBlockInfo info;
info.offset = (uint32_t)ptr;
info.size = size;
info.type = MemoryType::STANDARD;
info.block.addr = addr;
info.block.size = size;
m_MemoryBlockInfo[info.offset] = info;
EnterCriticalSection(&m_CriticalSection);
m_MemoryBlockInfo[addr] = info;
LeaveCriticalSection(&m_CriticalSection);
}
LeaveCriticalSection(&m_CriticalSection);
RETURN(ptr);
RETURN((void*)addr);
}
void* MemoryManager::AllocateAligned(size_t size, size_t alignment)
@ -80,22 +84,23 @@ void* MemoryManager::AllocateAligned(size_t size, size_t alignment)
LOG_FUNC_ARG(alignment);
LOG_FUNC_END;
EnterCriticalSection(&m_CriticalSection);
assert(size > 0);
void* ptr = _aligned_malloc(size, alignment);
void * addr = _aligned_malloc(size, alignment);
if (addr != NULL) {
TypedMemoryBlock info;
if (ptr != nullptr) {
MemoryBlockInfo info;
info.offset = (uint32_t)ptr;
info.size = size;
info.type = MemoryType::ALIGNED;
info.block.addr = addr;
info.block.size = size;
m_MemoryBlockInfo[info.offset] = info;
EnterCriticalSection(&m_CriticalSection);
m_MemoryBlockInfo[addr] = info;
LeaveCriticalSection(&m_CriticalSection);
}
LeaveCriticalSection(&m_CriticalSection);
RETURN(ptr);
RETURN((void*)addr);
}
void* MemoryManager::AllocateContiguous(size_t size, size_t alignment)
@ -105,35 +110,44 @@ void* MemoryManager::AllocateContiguous(size_t size, size_t alignment)
LOG_FUNC_ARG(alignment);
LOG_FUNC_END;
assert(size > 0);
assert(alignment >= 4096); // TODO : Pull PAGE_SIZE in scope for this?
assert((alignment & (alignment - 1)) == 0);
xbaddr alignMask = (xbaddr)(alignment - 1);
xbaddr addr = NULL;
EnterCriticalSection(&m_CriticalSection);
// If the end address of the block won't meet the alignment, adjust the size
if (size % alignment > 0) {
size = (size + alignment) + (size % alignment);
}
uint32_t addr = NULL;
// If the allocation table is empty, we can allocate wherever we please
if (m_ContiguousMemoryRegions.size() == 0) {
// Is the contiguous allocation table empty?
if (m_ContiguousMemoryBlocks.size() == 0) {
// Start allocating Contiguous Memory after the Kernel image header to prevent overwriting our dummy Kernel
addr = XBOX_KERNEL_BASE + sizeof(DUMMY_KERNEL);
addr = (addr + alignMask) & ~alignMask;
} else {
// Locate the first available Memory Region with enough space for the requested buffer
// This could be improved later on by always locating the smallest block with enough space
// TODO : This could be improved later on by always locating the smallest block with enough space
// in order to reduce memory fragmentation.
for (auto it = m_ContiguousMemoryRegions.begin(); it != m_ContiguousMemoryRegions.end(); ++it) {
ContiguousMemoryRegion current = it->second;
for (auto it = m_ContiguousMemoryBlocks.begin(); it != m_ContiguousMemoryBlocks.end(); ++it) {
MemoryBlock current = it->second;
xbaddr after_current = (xbaddr)current.addr + current.size;
after_current = (after_current + alignMask) & ~alignMask;
if (std::next(it) == m_ContiguousMemoryRegions.end()) {
addr = current.offset + current.size;
// Is this the last entry?
if (std::next(it) == m_ContiguousMemoryBlocks.end()) {
// Continue allocating after the last block :
addr = after_current;
break;
}
ContiguousMemoryRegion next = std::next(it)->second;
if (((current.offset + current.size + size) < next.offset)) {
addr = current.offset + current.size;
break;
// Is there an empty gap after the current entry?
MemoryBlock next = std::next(it)->second;
if (after_current < (xbaddr)next.addr) {
// does this allocation fit inside the gap?
if (after_current + size < (xbaddr)next.addr) {
addr = after_current;
break;
}
}
}
}
@ -144,16 +158,18 @@ void* MemoryManager::AllocateContiguous(size_t size, size_t alignment)
}
if (addr != NULL) {
ContiguousMemoryRegion region;
region.offset = addr;
region.size = size;
m_ContiguousMemoryRegions[addr] = region;
MemoryBlock block;
block.addr = (void *)addr;
block.size = size;
m_ContiguousMemoryBlocks[(void *)addr] = block;
TypedMemoryBlock info;
MemoryBlockInfo info;
info.type = MemoryType::CONTIGUOUS;
info.offset = region.offset;
info.size = region.size;
m_MemoryBlockInfo[addr] = info;
info.block = block;
m_MemoryBlockInfo[(void *)addr] = info;
}
LeaveCriticalSection(&m_CriticalSection);
@ -162,42 +178,42 @@ void* MemoryManager::AllocateContiguous(size_t size, size_t alignment)
void* MemoryManager::AllocateZeroed(size_t num, size_t size)
{
	// calloc-style helper : allocate num * size bytes via Allocate()
	// (so the block is tracked like any other) and zero-initialize it.
	void* addr = Allocate(num * size);
	if (addr != NULL) {
		// Guard against memset on a failed allocation (undefined behaviour).
		memset(addr, 0, num * size);
	}

	return addr;
}
bool MemoryManager::IsAllocated(void* block)
bool MemoryManager::IsAllocated(void* addr)
{
LOG_FUNC_ONE_ARG(block);
LOG_FUNC_ONE_ARG(addr);
EnterCriticalSection(&m_CriticalSection);
bool result = m_MemoryBlockInfo.find((uint32_t)block) != m_MemoryBlockInfo.end();
bool result = m_MemoryBlockInfo.find(addr) != m_MemoryBlockInfo.end();
LeaveCriticalSection(&m_CriticalSection);
RETURN(result);
}
void MemoryManager::Free(void* block)
void MemoryManager::Free(void* addr)
{
LOG_FUNC_ONE_ARG(block);
LOG_FUNC_ONE_ARG(addr);
EnterCriticalSection(&m_CriticalSection);
if (IsAllocated(block)) {
MemoryBlockInfo info = m_MemoryBlockInfo[info.offset];
if (IsAllocated(addr)) {
TypedMemoryBlock info = m_MemoryBlockInfo[addr];
switch (info.type) {
case MemoryType::ALIGNED:
_aligned_free((void*)info.offset);
m_MemoryBlockInfo.erase(info.offset);
_aligned_free((void*)info.block.addr);
m_MemoryBlockInfo.erase(info.block.addr);
break;
case MemoryType::STANDARD:
free((void*)info.offset);
m_MemoryBlockInfo.erase(info.offset);
free((void*)info.block.addr);
m_MemoryBlockInfo.erase(info.block.addr);
break;
case MemoryType::CONTIGUOUS:
m_ContiguousMemoryRegions.erase(info.offset);
m_MemoryBlockInfo.erase(info.offset);
m_ContiguousMemoryBlocks.erase(info.block.addr);
m_MemoryBlockInfo.erase(info.block.addr);
break;
default:
CxbxKrnlCleanup("Fatal: MemoryManager attempted to free memory of an unknown type");
@ -211,17 +227,18 @@ void MemoryManager::Free(void* block)
LeaveCriticalSection(&m_CriticalSection);
}
size_t MemoryManager::QueryAllocationSize(void* block)
size_t MemoryManager::QueryAllocationSize(void* addr)
{
LOG_FUNC_ONE_ARG(block);
EnterCriticalSection(&m_CriticalSection);
LOG_FUNC_ONE_ARG(addr);
size_t ret = 0;
if (IsAllocated(block)) {
MemoryBlockInfo info = m_MemoryBlockInfo[(uint32_t)block];
ret = info.size;
EnterCriticalSection(&m_CriticalSection);
// TODO : Allow queries both from the start AND somewhere inside an allocated block,
// which will fix at least part of https://github.com/Cxbx-Reloaded/Cxbx-Reloaded/issues/283
if (IsAllocated(addr)) {
TypedMemoryBlock info = m_MemoryBlockInfo[addr];
ret = info.block.size;
}
else {
EmuWarning("MemoryManager: Attempted to query memory that was not allocated via MemoryManager");

View File

@ -42,15 +42,10 @@
#include <map>
#include <unordered_map>
// Define virtual base and alternate virtual base of kernel.
#define KSEG0_BASE 0x80000000
// Define virtual base addresses for physical memory windows.
#define MM_SYSTEM_PHYSICAL_MAP KSEG0_BASE
#define MM_HIGHEST_PHYSICAL_PAGE 0x07FFF
#define MM_64M_PHYSICAL_PAGE 0x04000
#define MM_INSTANCE_PHYSICAL_PAGE 0x03FE0 // Chihiro arcade should use 0x07FF0
#define MM_INSTANCE_PAGE_COUNT 16
#define CONTIGUOUS_MEMORY_SIZE (64 * ONE_MB)
// A contiguous range of host memory : starting address plus length in bytes.
typedef struct {
void *addr; // start of the block, as returned by the allocator
size_t size; // length of the block in bytes
} MemoryBlock;
enum struct MemoryType {
STANDARD = 0,
@ -60,14 +55,8 @@ enum struct MemoryType {
typedef struct {
MemoryType type;
uint32_t offset;
size_t size;
} MemoryBlockInfo;
typedef struct {
uint32_t offset;
size_t size;
} ContiguousMemoryRegion;
MemoryBlock block;
} TypedMemoryBlock;
class MemoryManager
{
@ -78,12 +67,12 @@ public:
void* AllocateAligned(size_t size, size_t alignment);
void* AllocateContiguous(size_t size, size_t alignment);
void* AllocateZeroed(size_t num, size_t size);
bool IsAllocated(void* block);
void Free(void* block);
size_t QueryAllocationSize(void* block);
bool IsAllocated(void* addr);
void Free(void* addr);
size_t QueryAllocationSize(void* addr);
private:
std::unordered_map<uint32_t, MemoryBlockInfo> m_MemoryBlockInfo;
std::map<uint32_t, ContiguousMemoryRegion> m_ContiguousMemoryRegions;
std::unordered_map<void *, TypedMemoryBlock> m_MemoryBlockInfo;
std::map<void *, MemoryBlock> m_ContiguousMemoryBlocks;
CRITICAL_SECTION m_CriticalSection;
};