common/atomic_ops: Don't cast away volatile from pointers

Preserves the volatility of the pointers being cast.
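The distinction matters because a C-style cast like (char*)pointer can silently cast away volatile, while reinterpret_cast cannot remove cv-qualifiers at all, so any accidental stripping becomes a compile error. A minimal sketch of the difference (illustrative only; uses a standard typedef in place of the project's common_types.h alias):

    #include <cstdint>

    using u8 = std::uint8_t; // stand-in for the project's u8 alias

    void Demo(volatile u8* pointer) {
        // C-style cast: compiles, but quietly drops the volatile qualifier.
        char* stripped = (char*)pointer;

        // reinterpret_cast cannot cast away cv-qualifiers; the line below
        // would fail to compile, catching the mistake:
        // char* bad = reinterpret_cast<char*>(pointer);

        // The qualifier has to be spelled out, which keeps it visible:
        volatile char* preserved = reinterpret_cast<volatile char*>(pointer);

        (void)stripped;
        (void)preserved;
    }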
Lioncash 2020-07-28 04:32:39 -04:00
parent 05781ce8c4
commit e3f0c93230
3 changed files with 30 additions and 27 deletions

src/common/atomic_ops.cpp

@@ -14,50 +14,55 @@ namespace Common {
 
 #if _MSC_VER
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
-    u8 result = _InterlockedCompareExchange8((char*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
+    const u8 result =
+        _InterlockedCompareExchange8(reinterpret_cast<volatile char*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
-    u16 result = _InterlockedCompareExchange16((short*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
+    const u16 result =
+        _InterlockedCompareExchange16(reinterpret_cast<volatile short*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
-    u32 result = _InterlockedCompareExchange((long*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
+    const u32 result =
+        _InterlockedCompareExchange(reinterpret_cast<volatile long*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
-    u64 result = _InterlockedCompareExchange64((__int64*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
+    const u64 result = _InterlockedCompareExchange64(reinterpret_cast<volatile __int64*>(pointer),
+                                                     value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
-    return _InterlockedCompareExchange128((__int64*)pointer, value[1], value[0],
-                                          (__int64*)expected.data()) != 0;
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
+    return _InterlockedCompareExchange128(reinterpret_cast<volatile __int64*>(pointer), value[1],
+                                          value[0],
+                                          reinterpret_cast<__int64*>(expected.data())) != 0;
 }
 
 #else
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
     unsigned __int128 value_a;
     unsigned __int128 expected_a;
     std::memcpy(&value_a, value.data(), sizeof(u128));
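For reference, the MSVC intrinsics wrapped above declare their destination parameters volatile-qualified, so the new reinterpret_casts only change the pointee type rather than adding or removing qualifiers. Abridged declarations, as documented for <intrin.h> (paraphrased here, not taken from the commit):

    char _InterlockedCompareExchange8(char volatile* Destination, char Exchange, char Comparand);
    short _InterlockedCompareExchange16(short volatile* Destination, short Exchange,
                                        short Comparand);
    long _InterlockedCompareExchange(long volatile* Destination, long Exchange, long Comparand);
    __int64 _InterlockedCompareExchange64(__int64 volatile* Destination, __int64 Exchange,
                                          __int64 Comparand);
    unsigned char _InterlockedCompareExchange128(__int64 volatile* Destination,
                                                 __int64 ExchangeHigh, __int64 ExchangeLow,
                                                 __int64* ComparandResult);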

src/common/atomic_ops.h

@@ -8,10 +8,10 @@
 
 namespace Common {
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected);
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected);
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected);
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected);
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected);
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected);
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected);
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected);
 
 } // namespace Common
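A hypothetical caller of this interface might look as follows (a sketch only; SpinStore is not part of the codebase, and the u32 alias is assumed to come from the common_types.h header that atomic_ops.h pulls in):

    #include "common/atomic_ops.h"

    // Repeatedly attempt to publish `desired`, reloading the comparand on
    // contention. Returns once the swap succeeds.
    void SpinStore(volatile u32* counter, u32 desired) {
        u32 expected = *counter;
        // AtomicCompareAndSwap(pointer, value, expected) returns true when
        // the value at `pointer` matched `expected` and was replaced.
        while (!Common::AtomicCompareAndSwap(counter, desired, expected)) {
            expected = *counter;
        }
    }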

src/core/memory.cpp

@@ -704,7 +704,7 @@ struct Memory::Impl {
         u8* page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
         if (page_pointer != nullptr) {
             // NOTE: Avoid adding any extra logic to this fast-path block
-            T volatile* pointer = reinterpret_cast<T volatile*>(&page_pointer[vaddr]);
+            auto* pointer = reinterpret_cast<volatile T*>(&page_pointer[vaddr]);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
         }
@@ -720,9 +720,8 @@ struct Memory::Impl {
         case Common::PageType::RasterizerCachedMemory: {
             u8* host_ptr{GetPointerFromRasterizerCachedMemory(vaddr)};
             system.GPU().InvalidateRegion(vaddr, sizeof(T));
-            T volatile* pointer = reinterpret_cast<T volatile*>(&host_ptr);
+            auto* pointer = reinterpret_cast<volatile T*>(&host_ptr);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
-            break;
         }
         default:
             UNREACHABLE();
@@ -734,7 +733,7 @@ struct Memory::Impl {
         u8* const page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
         if (page_pointer != nullptr) {
             // NOTE: Avoid adding any extra logic to this fast-path block
-            u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&page_pointer[vaddr]);
+            auto* pointer = reinterpret_cast<volatile u64*>(&page_pointer[vaddr]);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
         }
@@ -750,9 +749,8 @@ struct Memory::Impl {
         case Common::PageType::RasterizerCachedMemory: {
             u8* host_ptr{GetPointerFromRasterizerCachedMemory(vaddr)};
             system.GPU().InvalidateRegion(vaddr, sizeof(u128));
-            u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&host_ptr);
+            auto* pointer = reinterpret_cast<volatile u64*>(&host_ptr);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
-            break;
         }
         default:
             UNREACHABLE();
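The fast path in these hunks reduces to a small pattern: index into the untyped page buffer, reinterpret the address as a pointer to volatile T, and hand it to the CAS helper. A standalone sketch of that pattern (hypothetical wrapper, not from the commit; `page` and `offset` are illustrative, and the address is assumed naturally aligned as the real callers guarantee):

    #include <cstddef>

    #include "common/atomic_ops.h"

    // Compare-and-swap a u32 embedded in an untyped page buffer.
    // Hypothetical helper mirroring the fast path in Memory::Impl.
    bool WriteExclusive32(u8* page, std::size_t offset, u32 data, u32 expected) {
        // Keep volatile through the cast so the helper receives exactly the
        // pointer type it declares, rather than one with qualifiers stripped.
        auto* pointer = reinterpret_cast<volatile u32*>(&page[offset]);
        return Common::AtomicCompareAndSwap(pointer, data, expected);
    }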