core/memory: Check our memory fallbacks for out-of-bounds behavior.

This makes it much harder to crash yuzu.

While touching this code, also implement the 48-bit address masking of AArch64.
Markus Wick 2021-05-29 09:24:09 +02:00
parent ddb186e61d
commit 42a7c5d017
3 changed files with 46 additions and 4 deletions
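
The memory.cpp hunks further down all add the same two-step guard in front of the page-table lookup: mask the incoming address down to 48 bits, then reject anything at or beyond the end of the current address space before it can index the page-table vectors out of bounds. A minimal stand-alone sketch of that shape (SanitizeAddress and the explicit address_space_bits parameter are illustrative names, not part of this commit):

    #include <cstddef>
    #include <cstdint>
    #include <optional>

    // Illustrative helper, not part of the commit: returns the usable address,
    // or std::nullopt when the access should take the logged "Unmapped" fallback.
    std::optional<std::uint64_t> SanitizeAddress(std::uint64_t vaddr, std::size_t address_space_bits) {
        // Keep only the low 48 bits, the same mask the diff applies.
        vaddr &= 0xffff'ffff'ffffULL;
        // Reject addresses past the end of the guest address space so that
        // pointers[vaddr >> PAGE_BITS] can never go out of bounds.
        if (vaddr >= (std::uint64_t{1} << address_space_bits)) {
            return std::nullopt;
        }
        return vaddr;
    }

In the diff itself the fallback differs per operation: Read logs and returns 0, Write logs and drops the store, and both exclusive writes log and return true.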

src/common/page_table.cpp

@@ -14,6 +14,7 @@ void PageTable::Resize(size_t address_space_width_in_bits, size_t page_size_in_bits) {
     const size_t num_page_table_entries{1ULL << (address_space_width_in_bits - page_size_in_bits)};
     pointers.resize(num_page_table_entries);
     backing_addr.resize(num_page_table_entries);
+    current_address_space_width_in_bits = address_space_width_in_bits;
 }
 } // namespace Common
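
For a sense of scale, Resize() above creates one entry per page, i.e. 2^(address_space_width_in_bits - page_size_in_bits) entries. A quick worked example with assumed parameters (the 39-bit width and 4 KiB pages are illustrative; neither value appears in this commit):

    #include <cstddef>

    // Assumed example parameters, not taken from the commit.
    constexpr std::size_t address_space_width_in_bits = 39;
    constexpr std::size_t page_size_in_bits = 12; // 4 KiB pages

    // Same expression as in PageTable::Resize() above.
    constexpr std::size_t num_page_table_entries =
        1ULL << (address_space_width_in_bits - page_size_in_bits);

    static_assert(num_page_table_entries == 134'217'728); // 2^27 pointers to track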

src/common/page_table.h

@@ -98,6 +98,10 @@ struct PageTable {
      */
     void Resize(size_t address_space_width_in_bits, size_t page_size_in_bits);
+    size_t GetAddressSpaceBits() const {
+        return current_address_space_width_in_bits;
+    }
     /**
      * Vector of memory pointers backing each page. An entry can only be non-null if the
      * corresponding attribute element is of type `Memory`.
@@ -105,6 +109,8 @@ struct PageTable {
     VirtualBuffer<PageInfo> pointers;
     VirtualBuffer<u64> backing_addr;
+    size_t current_address_space_width_in_bits;
 };
 } // namespace Common
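
The new accessor lets callers recover the exclusive upper bound of valid guest addresses as 1 << GetAddressSpaceBits(), which is exactly the comparison the memory.cpp hunks below perform. A small sketch, assuming the usual yuzu include paths (IsInAddressSpace is an illustrative name, not part of the commit):

    #include "common/common_types.h" // u64 (assumed include path)
    #include "common/page_table.h"   // Common::PageTable (assumed include path)

    // Illustrative helper: true if vaddr can safely index the table's
    // pointers/backing_addr vectors.
    bool IsInAddressSpace(const Common::PageTable& table, u64 vaddr) {
        // e.g. a 39-bit address space covers [0, 1ULL << 39)
        const u64 address_space_end = 1ULL << table.GetAddressSpaceBits();
        return vaddr < address_space_end;
    }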

src/core/memory.cpp

@@ -591,7 +591,15 @@ struct Memory::Impl {
      * @returns The instance of T read from the specified virtual address.
      */
     template <typename T>
-    T Read(const VAddr vaddr) {
+    T Read(VAddr vaddr) {
+        // AARCH64 masks the upper 16 bit of all memory accesses
+        vaddr &= 0xffffffffffffLL;
+        if (vaddr >= 1uLL << current_page_table->GetAddressSpaceBits()) {
+            LOG_ERROR(HW_Memory, "Unmapped Read{} @ 0x{:08X}", sizeof(T) * 8, vaddr);
+            return 0;
+        }
         // Avoid adding any extra logic to this fast-path block
         const uintptr_t raw_pointer = current_page_table->pointers[vaddr >> PAGE_BITS].Raw();
         if (const u8* const pointer = Common::PageTable::PageInfo::ExtractPointer(raw_pointer)) {
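
The mask constant 0xffffffffffff used above is 2^48 - 1, so only the low 48 bits of the incoming address survive before the bounds check. Two worked values (the example addresses are made up):

    #include <cstdint>

    // Same constant as the vaddr &= 0xffffffffffffLL; line above, written unsigned.
    constexpr std::uint64_t kAddressMask = 0xffff'ffff'ffffULL;

    // The upper 16 bits are discarded...
    static_assert((0xFFFF'0000'1234'5678ULL & kAddressMask) == 0x0000'0000'1234'5678ULL);
    // ...while addresses that already fit in 48 bits pass through unchanged.
    static_assert((0x0000'7FFF'FFFF'F000ULL & kAddressMask) == 0x0000'7FFF'FFFF'F000ULL);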
@@ -629,7 +637,16 @@ struct Memory::Impl {
      * is undefined.
      */
     template <typename T>
-    void Write(const VAddr vaddr, const T data) {
+    void Write(VAddr vaddr, const T data) {
+        // AARCH64 masks the upper 16 bit of all memory accesses
+        vaddr &= 0xffffffffffffLL;
+        if (vaddr >= 1uLL << current_page_table->GetAddressSpaceBits()) {
+            LOG_ERROR(HW_Memory, "Unmapped Write{} 0x{:08X} @ 0x{:016X}", sizeof(data) * 8,
+                      static_cast<u32>(data), vaddr);
+            return;
+        }
         // Avoid adding any extra logic to this fast-path block
         const uintptr_t raw_pointer = current_page_table->pointers[vaddr >> PAGE_BITS].Raw();
         if (u8* const pointer = Common::PageTable::PageInfo::ExtractPointer(raw_pointer)) {
@@ -656,7 +673,16 @@ struct Memory::Impl {
     }
     template <typename T>
-    bool WriteExclusive(const VAddr vaddr, const T data, const T expected) {
+    bool WriteExclusive(VAddr vaddr, const T data, const T expected) {
+        // AARCH64 masks the upper 16 bit of all memory accesses
+        vaddr &= 0xffffffffffffLL;
+        if (vaddr >= 1uLL << current_page_table->GetAddressSpaceBits()) {
+            LOG_ERROR(HW_Memory, "Unmapped Write{} 0x{:08X} @ 0x{:016X}", sizeof(data) * 8,
+                      static_cast<u32>(data), vaddr);
+            return true;
+        }
         const uintptr_t raw_pointer = current_page_table->pointers[vaddr >> PAGE_BITS].Raw();
         if (u8* const pointer = Common::PageTable::PageInfo::ExtractPointer(raw_pointer)) {
             // NOTE: Avoid adding any extra logic to this fast-path block
@@ -683,7 +709,16 @@ struct Memory::Impl {
         return true;
     }
-    bool WriteExclusive128(const VAddr vaddr, const u128 data, const u128 expected) {
+    bool WriteExclusive128(VAddr vaddr, const u128 data, const u128 expected) {
+        // AARCH64 masks the upper 16 bit of all memory accesses
+        vaddr &= 0xffffffffffffLL;
+        if (vaddr >= 1uLL << current_page_table->GetAddressSpaceBits()) {
+            LOG_ERROR(HW_Memory, "Unmapped Write{} 0x{:08X} @ 0x{:016X}", sizeof(data) * 8,
+                      static_cast<u32>(data[0]), vaddr);
+            return true;
+        }
         const uintptr_t raw_pointer = current_page_table->pointers[vaddr >> PAGE_BITS].Raw();
         if (u8* const pointer = Common::PageTable::PageInfo::ExtractPointer(raw_pointer)) {
             // NOTE: Avoid adding any extra logic to this fast-path block
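
For reference, the "Unmapped ..." messages above are fmt format strings fed through LOG_ERROR; a self-contained sketch of how the 32-bit write message renders, using fmt directly and made-up values (the logger's own prefixes are not reproduced):

    #include <cstdint>
    #include <fmt/format.h>

    int main() {
        const std::uint64_t vaddr = 0x0000'7F00'0000'0000ULL; // example address only
        const std::uint32_t data = 0xDEADBEEF;                // example value only
        // Mirrors the format string used in the Write/WriteExclusive fallbacks above.
        fmt::print("Unmapped Write{} 0x{:08X} @ 0x{:016X}\n", sizeof(data) * 8, data, vaddr);
        // Prints: Unmapped Write32 0xDEADBEEF @ 0x00007F0000000000
    }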