shadPS4/src/core/memory.h

// SPDX-FileCopyrightText: Copyright 2024 shadPS4 Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later
#pragma once

#include <map>
#include <mutex>
#include <string>
#include <string_view>

#include "common/enum.h"
#include "common/singleton.h"
#include "common/types.h"
#include "core/address_space.h"
#include "core/libraries/kernel/memory_management.h"

namespace Vulkan {
class Rasterizer;
}

namespace Libraries::Kernel {
struct OrbisQueryInfo;
}

namespace Core {
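
// Protection attributes for a mapped range. CPU and GPU access are tracked separately.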
enum class MemoryProt : u32 {
    NoAccess = 0,
    CpuRead = 1,
    CpuReadWrite = 2,
    GpuRead = 16,
    GpuWrite = 32,
    GpuReadWrite = 38,
};
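
// mmap-style flags controlling how a range is reserved or mapped; the values
// appear to follow the BSD MAP_* constants used by the Orbis kernel.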
enum class MemoryMapFlags : u32 {
    NoFlags = 0,
    Shared = 1,
    Private = 2,
    Fixed = 0x10,
    NoOverwrite = 0x0080,
    NoSync = 0x800,
    NoCore = 0x20000,
    NoCoalesce = 0x400000,
};
DECLARE_ENUM_FLAG_OPERATORS(MemoryMapFlags)
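
// What currently backs a virtual memory area.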
enum class VMAType : u32 {
    Free = 0,
    Reserved = 1,
    Direct = 2,
    Flexible = 3,
    Pooled = 4,
    Stack = 5,
    Code = 6,
    File = 7,
};
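
// One contiguous block of direct (physical) memory. Adjacent blocks with the
// same free/used state can be coalesced via CanMergeWith().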
struct DirectMemoryArea {
    PAddr base = 0;
    size_t size = 0;
    int memory_type = 0;
    bool is_free = true;

    PAddr GetEnd() const {
        return base + size;
    }

    bool CanMergeWith(const DirectMemoryArea& next) const {
        if (base + size != next.base) {
            return false;
        }
        if (is_free != next.is_free) {
            return false;
        }
        return true;
    }
};
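
// One node of the virtual address space map: a contiguous range with uniform
// type and protection. Direct mappings also record their physical backing.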
struct VirtualMemoryArea {
    VAddr base = 0;
    size_t size = 0;
    PAddr phys_base = 0;
    VMAType type = VMAType::Free;
    MemoryProt prot = MemoryProt::NoAccess;
    bool disallow_merge = false;
    std::string name = "";
    uintptr_t fd = 0;
    bool is_exec = false;

    bool Contains(VAddr addr, size_t size) const {
        return addr >= base && (addr + size) <= (base + this->size);
    }

    bool IsFree() const noexcept {
        return type == VMAType::Free;
    }

    bool IsMapped() const noexcept {
        return type != VMAType::Free && type != VMAType::Reserved;
    }

    bool CanMergeWith(const VirtualMemoryArea& next) const {
        if (disallow_merge || next.disallow_merge) {
            return false;
        }
        if (base + size != next.base) {
            return false;
        }
        if (type == VMAType::Direct && phys_base + size != next.phys_base) {
            return false;
        }
        if (prot != next.prot || type != next.type) {
            return false;
        }
        return true;
    }
};
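
// Central bookkeeping for guest memory. Direct (physical) areas and virtual areas
// are kept in ordered maps keyed by base address, while the actual host mappings
// are delegated to the platform AddressSpace. Intended as the backend for the
// sceKernel* memory HLE calls under core/libraries/kernel.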
class MemoryManager {
    using DMemMap = std::map<PAddr, DirectMemoryArea>;
    using DMemHandle = DMemMap::iterator;

    using VMAMap = std::map<VAddr, VirtualMemoryArea>;
    using VMAHandle = VMAMap::iterator;

public:
    explicit MemoryManager();
    ~MemoryManager();

    void SetRasterizer(Vulkan::Rasterizer* rasterizer_) {
        rasterizer = rasterizer_;
    }

    void SetTotalFlexibleSize(u64 size) {
        total_flexible_size = size;
    }

    u64 GetAvailableFlexibleSize() const {
        return total_flexible_size - flexible_usage;
    }

    VAddr SystemReservedVirtualBase() noexcept {
        return impl.SystemReservedVirtualBase();
    }
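
    // Allocation / mapping entry points, roughly mirroring the sceKernel* memory API.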
    PAddr Allocate(PAddr search_start, PAddr search_end, size_t size, u64 alignment,
                   int memory_type);

    void Free(PAddr phys_addr, size_t size);

    int Reserve(void** out_addr, VAddr virtual_addr, size_t size, MemoryMapFlags flags,
                u64 alignment = 0);

    int MapMemory(void** out_addr, VAddr virtual_addr, size_t size, MemoryProt prot,
                  MemoryMapFlags flags, VMAType type, std::string_view name = "",
                  bool is_exec = false, PAddr phys_addr = -1, u64 alignment = 0);

    int MapFile(void** out_addr, VAddr virtual_addr, size_t size, MemoryProt prot,
                MemoryMapFlags flags, uintptr_t fd, size_t offset);

    void UnmapMemory(VAddr virtual_addr, size_t size);

    int QueryProtection(VAddr addr, void** start, void** end, u32* prot);

    int VirtualQuery(VAddr addr, int flags, Libraries::Kernel::OrbisVirtualQueryInfo* info);

    int DirectMemoryQuery(PAddr addr, bool find_next, Libraries::Kernel::OrbisQueryInfo* out_info);

    int DirectQueryAvailable(PAddr search_start, PAddr search_end, size_t alignment,
                             PAddr* phys_addr_out, size_t* size_out);

    int GetDirectMemoryType(PAddr addr, int* directMemoryTypeOut, void** directMemoryStartOut,
                            void** directMemoryEndOut);

    void NameVirtualRange(VAddr virtual_addr, size_t size, std::string_view name);

private:
    VMAHandle FindVMA(VAddr target) {
        return std::prev(vma_map.upper_bound(target));
    }

    DMemHandle FindDmemArea(PAddr target) {
        return std::prev(dmem_map.upper_bound(target));
    }
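
    // Coalesces `iter` with its immediate neighbours whenever CanMergeWith() allows
    // it, erasing the absorbed nodes and returning the surviving iterator.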
    template <typename Handle>
    Handle MergeAdjacent(auto& handle_map, Handle iter) {
        const auto next_vma = std::next(iter);
        if (next_vma != handle_map.end() && iter->second.CanMergeWith(next_vma->second)) {
            iter->second.size += next_vma->second.size;
            handle_map.erase(next_vma);
        }

        if (iter != handle_map.begin()) {
            auto prev_vma = std::prev(iter);
            if (prev_vma->second.CanMergeWith(iter->second)) {
                prev_vma->second.size += iter->second.size;
                handle_map.erase(iter);
                iter = prev_vma;
            }
        }

        return iter;
    }
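
    // Free-range search and splitting helpers: SearchFree locates a gap for a new
    // mapping, while Carve*/Split cut existing areas so a request can claim an
    // exact sub-range.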
    VAddr SearchFree(VAddr virtual_addr, size_t size, u32 alignment = 0);

    VMAHandle CarveVMA(VAddr virtual_addr, size_t size);

    DMemHandle CarveDmemArea(PAddr addr, size_t size);

    VMAHandle Split(VMAHandle vma_handle, size_t offset_in_vma);

    DMemHandle Split(DMemHandle dmem_handle, size_t offset_in_area);

private:
    AddressSpace impl;
    DMemMap dmem_map;
    VMAMap vma_map;
    std::recursive_mutex mutex;
    size_t total_flexible_size = 448_MB;
    size_t flexible_usage{};
    Vulkan::Rasterizer* rasterizer{};
};

using Memory = Common::Singleton<MemoryManager>;
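
// Illustrative usage sketch (not part of this header): callers are expected to fetch
// the shared manager through the singleton alias above (assuming Common::Singleton's
// usual Instance() accessor) and go through the sceKernel-style API, e.g.
//
//   auto* memory = Core::Memory::Instance();
//   void* mapped = nullptr;
//   memory->MapMemory(&mapped, /*virtual_addr=*/0, size, Core::MemoryProt::CpuReadWrite,
//                     Core::MemoryMapFlags::NoFlags, Core::VMAType::Flexible, "demo");
//
// The arguments here are made up for illustration; the real call sites live in the
// libkernel memory HLE code.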
} // namespace Core