mirror of https://github.com/Atmosphere-NX/Atmosphere
synced 2024-12-22 12:21:18 +00:00
kern: refactor KMemoryLayout
This commit is contained in: parent 90fd771fce, commit 1b63002f91
11 changed files with 327 additions and 491 deletions
@ -36,11 +36,13 @@ namespace ams::kern::board::nintendo::nx {
u32 hs_detached_value;
private:
static ALWAYS_INLINE bool IsHeapVirtualAddress(KVirtualAddress addr) {
return KMemoryLayout::IsHeapVirtualAddress(nullptr, addr);
const KMemoryRegion *hint = nullptr;
return KMemoryLayout::IsHeapVirtualAddress(hint, addr);
}

static ALWAYS_INLINE bool IsHeapPhysicalAddress(KPhysicalAddress addr) {
return KMemoryLayout::IsHeapPhysicalAddress(nullptr, addr);
const KMemoryRegion *hint = nullptr;
return KMemoryLayout::IsHeapPhysicalAddress(hint, addr);
}

static ALWAYS_INLINE KVirtualAddress GetHeapVirtualAddress(KPhysicalAddress addr) {
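The new overloads above thread a cached KMemoryRegion pointer through repeated heap-address checks instead of passing nullptr each time. Below is a minimal standalone sketch of that hint-caching lookup, using a simplified Region type and a std::map in place of KMemoryRegionTree; all names here are illustrative, not the kernel's.

    #include <cstddef>
    #include <cstdint>
    #include <iterator>
    #include <map>

    struct Region {
        uintptr_t address;
        size_t size;
        uint32_t type;
        bool Contains(uintptr_t a) const { return address <= a && a < address + size; }
    };

    /* Hypothetical stand-in for KMemoryRegionTree: regions keyed by base address. */
    using Tree = std::map<uintptr_t, Region>;

    const Region *Find(const Tree &tree, uintptr_t addr) {
        auto it = tree.upper_bound(addr);               /* first region starting past addr */
        if (it == tree.begin()) return nullptr;
        const Region &r = std::prev(it)->second;
        return r.Contains(addr) ? &r : nullptr;
    }

    /* Same shape as the hinted IsHeapVirtualAddress/IsHeapPhysicalAddress overloads:
       try the caller's cached region first, fall back to a lookup, update the cache. */
    bool IsTypedAddress(const Region *&hint, uintptr_t addr, const Tree &tree, uint32_t type) {
        if (hint != nullptr && hint->Contains(addr)) return true;    /* fast path: cache hit */
        if (const Region *found = Find(tree, addr); found != nullptr && found->type == type) {
            hint = found;                                            /* remember for the next query */
            return true;
        }
        return false;
    }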
@ -122,9 +122,13 @@ namespace ams::kern {
}
}

class KMemoryRegionTree;

class KMemoryRegion : public util::IntrusiveRedBlackTreeBaseNode<KMemoryRegion> {
NON_COPYABLE(KMemoryRegion);
NON_MOVEABLE(KMemoryRegion);
private:
friend class KMemoryRegionTree;
private:
uintptr_t address;
uintptr_t pair_address;
@ -149,7 +153,15 @@ namespace ams::kern {
/* ... */
}
constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, u32 r, u32 t) : KMemoryRegion(a, rs, std::numeric_limits<uintptr_t>::max(), r, t) { /* ... */ }

private:
constexpr ALWAYS_INLINE void Reset(uintptr_t a, uintptr_t rs, uintptr_t p, u32 r, u32 t) {
this->address = a;
this->pair_address = p;
this->region_size = rs;
this->attributes = r;
this->type_id = t;
}
public:
constexpr ALWAYS_INLINE uintptr_t GetAddress() const {
return this->address;
}
@ -250,56 +262,59 @@ namespace ams::kern {
|
|||
public:
|
||||
constexpr ALWAYS_INLINE KMemoryRegionTree() : tree() { /* ... */ }
|
||||
public:
|
||||
iterator FindContainingRegion(uintptr_t address) {
|
||||
return this->find(KMemoryRegion(address, 1, 0, 0));
|
||||
KMemoryRegion *FindModifiable(uintptr_t address) {
|
||||
if (auto it = this->find(KMemoryRegion(address, 1, 0, 0)); it != this->end()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
iterator FindFirstRegionByTypeAttr(u32 type_id, u32 attr = 0) {
|
||||
for (auto it = this->begin(); it != this->end(); it++) {
|
||||
const KMemoryRegion *Find(uintptr_t address) const {
|
||||
if (auto it = this->find(KMemoryRegion(address, 1, 0, 0)); it != this->cend()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindByType(u32 type_id) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); ++it) {
|
||||
if (it->GetType() == type_id) {
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindByTypeAndAttribute(u32 type_id, u32 attr) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); ++it) {
|
||||
if (it->GetType() == type_id && it->GetAttributes() == attr) {
|
||||
return it;
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
MESOSPHERE_INIT_ABORT();
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
iterator FindFirstRegionByType(u32 type_id) {
|
||||
for (auto it = this->begin(); it != this->end(); it++) {
|
||||
if (it->GetType() == type_id) {
|
||||
return it;
|
||||
const KMemoryRegion *FindFirstDerived(u32 type_id) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
MESOSPHERE_INIT_ABORT();
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
iterator TryFindFirstRegionByType(u32 type_id) {
|
||||
for (auto it = this->begin(); it != this->end(); it++) {
|
||||
if (it->GetType() == type_id) {
|
||||
return it;
|
||||
}
|
||||
}
|
||||
|
||||
return this->end();
|
||||
}
|
||||
|
||||
iterator FindFirstDerivedRegion(u32 type_id) {
|
||||
const KMemoryRegion *FindLastDerived(u32 type_id) const {
|
||||
const KMemoryRegion *region = nullptr;
|
||||
for (auto it = this->begin(); it != this->end(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
return it;
|
||||
region = std::addressof(*it);
|
||||
}
|
||||
}
|
||||
MESOSPHERE_INIT_ABORT();
|
||||
return region;
|
||||
}
|
||||
|
||||
iterator TryFindFirstDerivedRegion(u32 type_id) {
|
||||
for (auto it = this->begin(); it != this->end(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
return it;
|
||||
}
|
||||
}
|
||||
|
||||
return this->end();
|
||||
}
|
||||
|
||||
DerivedRegionExtents GetDerivedRegionExtents(u32 type_id) const {
|
||||
DerivedRegionExtents extents;
|
||||
|
@ -322,7 +337,9 @@ namespace ams::kern {
return extents;
}
public:
NOINLINE void InsertDirectly(uintptr_t address, size_t size, u32 attr = 0, u32 type_id = 0);
NOINLINE bool Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr = 0, u32 old_attr = 0);

NOINLINE KVirtualAddress GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id);

ALWAYS_INLINE KVirtualAddress GetRandomAlignedRegionWithGuard(size_t size, size_t alignment, u32 type_id, size_t guard_size) {
@ -401,297 +418,125 @@ namespace ams::kern {
|
|||
}
|
||||
};
|
||||
|
||||
class KMemoryRegionAllocator {
|
||||
NON_COPYABLE(KMemoryRegionAllocator);
|
||||
NON_MOVEABLE(KMemoryRegionAllocator);
|
||||
public:
|
||||
static constexpr size_t MaxMemoryRegions = 1000;
|
||||
friend class KMemoryLayout;
|
||||
private:
|
||||
KMemoryRegion region_heap[MaxMemoryRegions];
|
||||
size_t num_regions;
|
||||
private:
|
||||
constexpr ALWAYS_INLINE KMemoryRegionAllocator() : region_heap(), num_regions() { /* ... */ }
|
||||
public:
|
||||
ALWAYS_INLINE KMemoryRegion *Allocate() {
|
||||
/* Ensure we stay within the bounds of our heap. */
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(this->num_regions < MaxMemoryRegions);
|
||||
|
||||
return &this->region_heap[this->num_regions++];
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
ALWAYS_INLINE KMemoryRegion *Create(Args&&... args) {
|
||||
KMemoryRegion *region = this->Allocate();
|
||||
new (region) KMemoryRegion(std::forward<Args>(args)...);
|
||||
return region;
|
||||
}
|
||||
};
|
||||
|
||||
class KMemoryLayout {
|
||||
private:
|
||||
static /* constinit */ inline uintptr_t s_linear_phys_to_virt_diff;
|
||||
static /* constinit */ inline uintptr_t s_linear_virt_to_phys_diff;
|
||||
static /* constinit */ inline KMemoryRegionAllocator s_region_allocator;
|
||||
static /* constinit */ inline KMemoryRegionTree s_virtual_tree;
|
||||
static /* constinit */ inline KMemoryRegionTree s_physical_tree;
|
||||
static /* constinit */ inline KMemoryRegionTree s_virtual_linear_tree;
|
||||
static /* constinit */ inline KMemoryRegionTree s_physical_linear_tree;
|
||||
private:
|
||||
static ALWAYS_INLINE auto GetVirtualLinearExtents(const KMemoryRegionTree::DerivedRegionExtents physical) {
|
||||
return KMemoryRegion(GetInteger(GetLinearVirtualAddress(physical.GetAddress())), physical.GetSize(), 0, KMemoryRegionType_None);
|
||||
template<typename AddressType> requires IsKTypedAddress<AddressType>
|
||||
static ALWAYS_INLINE bool IsTypedAddress(const KMemoryRegion *®ion, AddressType address, KMemoryRegionTree &tree, KMemoryRegionType type) {
|
||||
/* Check if the cached region already contains the address. */
|
||||
if (region != nullptr && region->Contains(GetInteger(address))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Find the containing region, and update the cache. */
|
||||
if (const KMemoryRegion *found = tree.Find(GetInteger(address)); found != nullptr && found->IsDerivedFrom(type)) {
|
||||
region = found;
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
template<typename AddressType> requires IsKTypedAddress<AddressType>
|
||||
static ALWAYS_INLINE bool IsTypedAddress(const KMemoryRegion *®ion, AddressType address, size_t size, KMemoryRegionTree &tree, KMemoryRegionType type) {
|
||||
/* Get the end of the checked region. */
|
||||
const uintptr_t last_address = GetInteger(address) + size - 1;
|
||||
|
||||
/* Walk the tree to verify the region is correct. */
|
||||
const KMemoryRegion *cur = (region != nullptr && region->Contains(GetInteger(address))) ? region : tree.Find(GetInteger(address));
|
||||
while (cur != nullptr && cur->IsDerivedFrom(type)) {
|
||||
if (last_address <= cur->GetLastAddress()) {
|
||||
region = cur;
|
||||
return true;
|
||||
}
|
||||
|
||||
cur = cur->GetNext();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
template<typename AddressType> requires IsKTypedAddress<AddressType>
|
||||
static ALWAYS_INLINE const KMemoryRegion *Find(AddressType address, const KMemoryRegionTree &tree) {
|
||||
return tree.Find(GetInteger(address));
|
||||
}
|
||||
|
||||
static ALWAYS_INLINE KMemoryRegion &Dereference(KMemoryRegion *region) {
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(region != nullptr);
|
||||
return *region;
|
||||
}
|
||||
|
||||
static ALWAYS_INLINE const KMemoryRegion &Dereference(const KMemoryRegion *region) {
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(region != nullptr);
|
||||
return *region;
|
||||
}
|
||||
|
||||
static ALWAYS_INLINE KVirtualAddress GetStackTopAddress(s32 core_id, KMemoryRegionType type) {
|
||||
return Dereference(GetVirtualMemoryRegionTree().FindByTypeAndAttribute(type, static_cast<u32>(core_id))).GetEndAddress();
|
||||
}
|
||||
public:
|
||||
static ALWAYS_INLINE KMemoryRegionAllocator &GetMemoryRegionAllocator() { return s_region_allocator; }
|
||||
static ALWAYS_INLINE KMemoryRegionTree &GetVirtualMemoryRegionTree() { return s_virtual_tree; }
|
||||
static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalMemoryRegionTree() { return s_physical_tree; }
|
||||
static ALWAYS_INLINE KMemoryRegionTree &GetVirtualLinearMemoryRegionTree() { return s_virtual_linear_tree; }
|
||||
static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalLinearMemoryRegionTree() { return s_physical_linear_tree; }
|
||||
static ALWAYS_INLINE KMemoryRegionTree &GetVirtualMemoryRegionTree() { return s_virtual_tree; }
|
||||
static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalMemoryRegionTree() { return s_physical_tree; }
|
||||
static ALWAYS_INLINE KMemoryRegionTree &GetVirtualLinearMemoryRegionTree() { return s_virtual_linear_tree; }
|
||||
static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalLinearMemoryRegionTree() { return s_physical_linear_tree; }
|
||||
|
||||
static ALWAYS_INLINE KMemoryRegionTree::iterator GetEnd(KVirtualAddress) {
|
||||
return GetVirtualLinearMemoryRegionTree().end();
|
||||
}
|
||||
static ALWAYS_INLINE KVirtualAddress GetLinearVirtualAddress(KPhysicalAddress address) { return GetInteger(address) + s_linear_phys_to_virt_diff; }
|
||||
static ALWAYS_INLINE KPhysicalAddress GetLinearPhysicalAddress(KVirtualAddress address) { return GetInteger(address) + s_linear_virt_to_phys_diff; }
|
||||
|
||||
static ALWAYS_INLINE KMemoryRegionTree::iterator GetEnd(KPhysicalAddress) {
|
||||
return GetPhysicalMemoryRegionTree().end();
|
||||
}
|
||||
static NOINLINE const KMemoryRegion *Find(KVirtualAddress address) { return Find(address, GetVirtualMemoryRegionTree()); }
|
||||
static NOINLINE const KMemoryRegion *Find(KPhysicalAddress address) { return Find(address, GetPhysicalMemoryRegionTree()); }
|
||||
|
||||
static NOINLINE KMemoryRegionTree::iterator FindContainingRegion(KVirtualAddress address) {
|
||||
return GetVirtualMemoryRegionTree().FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
static NOINLINE const KMemoryRegion *FindLinear(KVirtualAddress address) { return Find(address, GetVirtualLinearMemoryRegionTree()); }
|
||||
static NOINLINE const KMemoryRegion *FindLinear(KPhysicalAddress address) { return Find(address, GetPhysicalLinearMemoryRegionTree()); }
|
||||
|
||||
static NOINLINE KMemoryRegionTree::iterator FindContainingRegion(KPhysicalAddress address) {
|
||||
return GetPhysicalMemoryRegionTree().FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
static NOINLINE KVirtualAddress GetMainStackTopAddress(s32 core_id) { return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscMainStack); }
|
||||
static NOINLINE KVirtualAddress GetIdleStackTopAddress(s32 core_id) { return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscIdleStack); }
|
||||
static NOINLINE KVirtualAddress GetExceptionStackTopAddress(s32 core_id) { return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscExceptionStack); }
|
||||
|
||||
static ALWAYS_INLINE KVirtualAddress GetLinearVirtualAddress(KPhysicalAddress address) {
|
||||
return GetInteger(address) + s_linear_phys_to_virt_diff;
|
||||
}
|
||||
static NOINLINE KVirtualAddress GetSlabRegionAddress() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelSlab)).GetAddress(); }
|
||||
static NOINLINE KVirtualAddress GetCoreLocalRegionAddress() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_CoreLocal)).GetAddress(); }
|
||||
|
||||
static ALWAYS_INLINE KPhysicalAddress GetLinearPhysicalAddress(KVirtualAddress address) {
|
||||
return GetInteger(address) + s_linear_virt_to_phys_diff;
|
||||
}
|
||||
static NOINLINE KVirtualAddress GetInterruptDistributorAddress() { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_InterruptDistributor)).GetPairAddress(); }
|
||||
static NOINLINE KVirtualAddress GetInterruptCpuInterfaceAddress() { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_InterruptCpuInterface)).GetPairAddress(); }
|
||||
static NOINLINE KVirtualAddress GetUartAddress() { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_Uart)).GetPairAddress(); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetMainStackTopAddress(s32 core_id) {
|
||||
return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscMainStack, static_cast<u32>(core_id))->GetEndAddress();
|
||||
}
|
||||
static NOINLINE const KMemoryRegion &GetMemoryControllerRegion() { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_MemoryController)); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetIdleStackTopAddress(s32 core_id) {
|
||||
return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscIdleStack, static_cast<u32>(core_id))->GetEndAddress();
|
||||
}
|
||||
static NOINLINE const KMemoryRegion &GetMetadataPoolRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramMetadataPool)); }
|
||||
static NOINLINE const KMemoryRegion &GetPageTableHeapRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualKernelPtHeap)); }
|
||||
static NOINLINE const KMemoryRegion &GetKernelStackRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelStack)); }
|
||||
static NOINLINE const KMemoryRegion &GetTempRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelTemp)); }
|
||||
static NOINLINE const KMemoryRegion &GetCoreLocalRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_CoreLocal)); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetExceptionStackTopAddress(s32 core_id) {
|
||||
return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscExceptionStack, static_cast<u32>(core_id))->GetEndAddress();
|
||||
}
|
||||
static NOINLINE const KMemoryRegion &GetKernelTraceBufferRegion() { return Dereference(GetVirtualLinearMemoryRegionTree().FindByType(KMemoryRegionType_VirtualKernelTraceBuffer)); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetSlabRegionAddress() {
|
||||
return GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_KernelSlab)->GetAddress();
|
||||
}
|
||||
static NOINLINE const KMemoryRegion &GetVirtualLinearRegion(KVirtualAddress address) { return Dereference(FindLinear(address)); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetCoreLocalRegionAddress() {
|
||||
return GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_CoreLocal)->GetAddress();
|
||||
}
|
||||
static NOINLINE const KMemoryRegion *GetPhysicalKernelTraceBufferRegion() { return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_KernelTraceBuffer); }
|
||||
static NOINLINE const KMemoryRegion *GetPhysicalOnMemoryBootImageRegion() { return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_OnMemoryBootImage); }
|
||||
static NOINLINE const KMemoryRegion *GetPhysicalDTBRegion() { return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_DTB); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetInterruptDistributorAddress() {
|
||||
return GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_InterruptDistributor)->GetPairAddress();
|
||||
}
|
||||
static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address) { return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionType_DramNonKernel); }
|
||||
static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion *®ion, KVirtualAddress address) { return IsTypedAddress(region, address, GetVirtualLinearMemoryRegionTree(), KMemoryRegionType_VirtualDramManagedPool); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetInterruptCpuInterfaceAddress() {
|
||||
return GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_InterruptCpuInterface)->GetPairAddress();
|
||||
}
|
||||
static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address, size_t size) { return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionType_DramNonKernel); }
|
||||
static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion *®ion, KVirtualAddress address, size_t size) { return IsTypedAddress(region, address, size, GetVirtualLinearMemoryRegionTree(), KMemoryRegionType_VirtualDramManagedPool); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetUartAddress() {
|
||||
return GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_Uart)->GetPairAddress();
|
||||
}
|
||||
|
||||
static NOINLINE KMemoryRegion &GetMemoryControllerRegion() {
|
||||
return *GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_MemoryController);
|
||||
}
|
||||
|
||||
static NOINLINE KMemoryRegion &GetMetadataPoolRegion() {
|
||||
return *GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_VirtualDramMetadataPool);
|
||||
}
|
||||
|
||||
static NOINLINE KMemoryRegion &GetPageTableHeapRegion() {
|
||||
return *GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_VirtualKernelPtHeap);
|
||||
}
|
||||
|
||||
static NOINLINE KMemoryRegion &GetKernelStackRegion() {
|
||||
return *GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_KernelStack);
|
||||
}
|
||||
|
||||
static NOINLINE KMemoryRegion &GetTempRegion() {
|
||||
return *GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_KernelTemp);
|
||||
}
|
||||
|
||||
static NOINLINE KMemoryRegion &GetKernelTraceBufferRegion() {
|
||||
return *GetVirtualLinearMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_VirtualKernelTraceBuffer);
|
||||
}
|
||||
|
||||
static NOINLINE KMemoryRegion &GetVirtualLinearRegion(KVirtualAddress address) {
|
||||
return *GetVirtualLinearMemoryRegionTree().FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
|
||||
static NOINLINE const KMemoryRegion *TryGetKernelTraceBufferRegion() {
|
||||
auto &tree = GetPhysicalMemoryRegionTree();
|
||||
if (KMemoryRegionTree::const_iterator it = tree.TryFindFirstDerivedRegion(KMemoryRegionType_KernelTraceBuffer); it != tree.end()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
static NOINLINE const KMemoryRegion *TryGetOnMemoryBootImageRegion() {
|
||||
auto &tree = GetPhysicalMemoryRegionTree();
|
||||
if (KMemoryRegionTree::const_iterator it = tree.TryFindFirstDerivedRegion(KMemoryRegionType_OnMemoryBootImage); it != tree.end()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
static NOINLINE const KMemoryRegion *TryGetDTBRegion() {
|
||||
auto &tree = GetPhysicalMemoryRegionTree();
|
||||
if (KMemoryRegionTree::const_iterator it = tree.TryFindFirstDerivedRegion(KMemoryRegionType_DTB); it != tree.end()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion **out, KPhysicalAddress address, const KMemoryRegion *hint = nullptr) {
|
||||
auto &tree = GetPhysicalLinearMemoryRegionTree();
|
||||
KMemoryRegionTree::const_iterator it = tree.end();
|
||||
if (hint != nullptr) {
|
||||
it = tree.iterator_to(*hint);
|
||||
}
|
||||
if (it == tree.end() || !it->Contains(GetInteger(address))) {
|
||||
it = tree.FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
if (it != tree.end() && it->IsDerivedFrom(KMemoryRegionType_DramNonKernel)) {
|
||||
if (out) {
|
||||
*out = std::addressof(*it);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion **out, KPhysicalAddress address, size_t size, const KMemoryRegion *hint = nullptr) {
|
||||
auto &tree = GetPhysicalLinearMemoryRegionTree();
|
||||
KMemoryRegionTree::const_iterator it = tree.end();
|
||||
if (hint != nullptr) {
|
||||
it = tree.iterator_to(*hint);
|
||||
}
|
||||
if (it == tree.end() || !it->Contains(GetInteger(address))) {
|
||||
it = tree.FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
if (it != tree.end() && it->IsDerivedFrom(KMemoryRegionType_DramNonKernel)) {
|
||||
const uintptr_t last_address = GetInteger(address) + size - 1;
|
||||
do {
|
||||
if (last_address <= it->GetLastAddress()) {
|
||||
if (out) {
|
||||
*out = std::addressof(*it);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
it++;
|
||||
} while (it != tree.end() && it->IsDerivedFrom(KMemoryRegionType_DramNonKernel));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion **out, KPhysicalAddress address, const KMemoryRegion *hint = nullptr) {
|
||||
auto &tree = GetPhysicalLinearMemoryRegionTree();
|
||||
KMemoryRegionTree::const_iterator it = tree.end();
|
||||
if (hint != nullptr) {
|
||||
it = tree.iterator_to(*hint);
|
||||
}
|
||||
if (it == tree.end() || !it->Contains(GetInteger(address))) {
|
||||
it = tree.FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
if (it != tree.end() && it->IsDerivedFrom(KMemoryRegionAttr_LinearMapped)) {
|
||||
if (out) {
|
||||
*out = std::addressof(*it);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion **out, KPhysicalAddress address, size_t size, const KMemoryRegion *hint = nullptr) {
|
||||
auto &tree = GetPhysicalLinearMemoryRegionTree();
|
||||
KMemoryRegionTree::const_iterator it = tree.end();
|
||||
if (hint != nullptr) {
|
||||
it = tree.iterator_to(*hint);
|
||||
}
|
||||
if (it == tree.end() || !it->Contains(GetInteger(address))) {
|
||||
it = tree.FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
if (it != tree.end() && it->IsDerivedFrom(KMemoryRegionAttr_LinearMapped)) {
|
||||
const uintptr_t last_address = GetInteger(address) + size - 1;
|
||||
do {
|
||||
if (last_address <= it->GetLastAddress()) {
|
||||
if (out) {
|
||||
*out = std::addressof(*it);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
it++;
|
||||
} while (it != tree.end() && it->IsDerivedFrom(KMemoryRegionAttr_LinearMapped));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion **out, KVirtualAddress address, const KMemoryRegion *hint = nullptr) {
|
||||
auto &tree = GetVirtualLinearMemoryRegionTree();
|
||||
KMemoryRegionTree::const_iterator it = tree.end();
|
||||
if (hint != nullptr) {
|
||||
it = tree.iterator_to(*hint);
|
||||
}
|
||||
if (it == tree.end() || !it->Contains(GetInteger(address))) {
|
||||
it = tree.FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
if (it != tree.end() && it->IsDerivedFrom(KMemoryRegionType_VirtualDramManagedPool)) {
|
||||
if (out) {
|
||||
*out = std::addressof(*it);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion **out, KVirtualAddress address, size_t size, const KMemoryRegion *hint = nullptr) {
|
||||
auto &tree = GetVirtualLinearMemoryRegionTree();
|
||||
KMemoryRegionTree::const_iterator it = tree.end();
|
||||
if (hint != nullptr) {
|
||||
it = tree.iterator_to(*hint);
|
||||
}
|
||||
if (it == tree.end() || !it->Contains(GetInteger(address))) {
|
||||
it = tree.FindContainingRegion(GetInteger(address));
|
||||
}
|
||||
if (it != tree.end() && it->IsDerivedFrom(KMemoryRegionType_VirtualDramManagedPool)) {
|
||||
const uintptr_t last_address = GetInteger(address) + size - 1;
|
||||
do {
|
||||
if (last_address <= it->GetLastAddress()) {
|
||||
if (out) {
|
||||
*out = std::addressof(*it);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
it++;
|
||||
} while (it != tree.end() && it->IsDerivedFrom(KMemoryRegionType_VirtualDramManagedPool));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address) { return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionAttr_LinearMapped); }
|
||||
static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address, size_t size) { return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionAttr_LinearMapped); }
|
||||
|
||||
static NOINLINE std::tuple<size_t, size_t> GetTotalAndKernelMemorySizes() {
|
||||
size_t total_size = 0, kernel_size = 0;
|
||||
for (auto it = GetPhysicalMemoryRegionTree().cbegin(); it != GetPhysicalMemoryRegionTree().cend(); it++) {
|
||||
if (it->IsDerivedFrom(KMemoryRegionType_Dram)) {
|
||||
total_size += it->GetSize();
|
||||
if (!it->IsDerivedFrom(KMemoryRegionType_DramNonKernel)) {
|
||||
kernel_size += it->GetSize();
|
||||
for (const auto ®ion : GetPhysicalMemoryRegionTree()) {
|
||||
if (region.IsDerivedFrom(KMemoryRegionType_Dram)) {
|
||||
total_size += region.GetSize();
|
||||
if (!region.IsDerivedFrom(KMemoryRegionType_DramNonKernel)) {
|
||||
kernel_size += region.GetSize();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -700,93 +545,37 @@ namespace ams::kern {
|
|||
|
||||
static void InitializeLinearMemoryRegionTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start);
|
||||
|
||||
static NOINLINE auto GetKernelRegionExtents() {
|
||||
return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Kernel);
|
||||
static NOINLINE auto GetKernelRegionExtents() { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Kernel); }
|
||||
static NOINLINE auto GetKernelCodeRegionExtents() { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelCode); }
|
||||
static NOINLINE auto GetKernelStackRegionExtents() { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelStack); }
|
||||
static NOINLINE auto GetKernelMiscRegionExtents() { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelMisc); }
|
||||
static NOINLINE auto GetKernelSlabRegionExtents() { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelSlab); }
|
||||
|
||||
|
||||
static NOINLINE auto GetLinearRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_LinearMapped); }
|
||||
|
||||
static NOINLINE auto GetLinearRegionVirtualExtents() {
|
||||
auto physical = GetLinearRegionPhysicalExtents();
|
||||
return KMemoryRegion(GetInteger(GetLinearVirtualAddress(physical.GetAddress())), physical.GetSize(), 0, KMemoryRegionType_None);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelCodeRegionExtents() {
|
||||
return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelCode);
|
||||
}
|
||||
static NOINLINE auto GetMainMemoryPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Dram); }
|
||||
static NOINLINE auto GetCarveoutRegionExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_CarveoutProtected); }
|
||||
|
||||
static NOINLINE auto GetKernelStackRegionExtents() {
|
||||
return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelStack);
|
||||
}
|
||||
static NOINLINE auto GetKernelRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernel); }
|
||||
static NOINLINE auto GetKernelCodeRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelCode); }
|
||||
static NOINLINE auto GetKernelSlabRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelSlab); }
|
||||
static NOINLINE auto GetKernelPageTableHeapRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelPtHeap); }
|
||||
static NOINLINE auto GetKernelInitPageTableRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelInitPt); }
|
||||
|
||||
static NOINLINE auto GetKernelMiscRegionExtents() {
|
||||
return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelMisc);
|
||||
}
|
||||
static NOINLINE auto GetKernelPoolPartitionRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramPoolPartition); }
|
||||
static NOINLINE auto GetKernelMetadataPoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramMetadataPool); }
|
||||
static NOINLINE auto GetKernelSystemPoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramSystemPool); }
|
||||
static NOINLINE auto GetKernelSystemNonSecurePoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramSystemNonSecurePool); }
|
||||
static NOINLINE auto GetKernelAppletPoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramAppletPool); }
|
||||
static NOINLINE auto GetKernelApplicationPoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramApplicationPool); }
|
||||
|
||||
static NOINLINE auto GetKernelSlabRegionExtents() {
|
||||
return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelSlab);
|
||||
}
|
||||
|
||||
static NOINLINE const KMemoryRegion &GetCoreLocalRegion() {
|
||||
return *GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_CoreLocal);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetLinearRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_LinearMapped);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetLinearRegionExtents() {
|
||||
return GetVirtualLinearExtents(GetLinearRegionPhysicalExtents());
|
||||
}
|
||||
|
||||
static NOINLINE auto GetMainMemoryPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Dram);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetCarveoutRegionExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_CarveoutProtected);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernel);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelCodeRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelCode);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelSlabRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelSlab);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelPageTableHeapRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelPtHeap);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelInitPageTableRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelInitPt);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelPoolPartitionRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramPoolPartition);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelMetadataPoolRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramMetadataPool);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelSystemPoolRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramSystemPool);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelSystemNonSecurePoolRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramSystemNonSecurePool);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelAppletPoolRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramAppletPool);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelApplicationPoolRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramApplicationPool);
|
||||
}
|
||||
|
||||
static NOINLINE auto GetKernelTraceBufferRegionPhysicalExtents() {
|
||||
return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelTraceBuffer);
|
||||
}
|
||||
static NOINLINE auto GetKernelTraceBufferRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelTraceBuffer); }
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -205,43 +205,43 @@ namespace ams::kern {
|
|||
bool IsLinearMappedPhysicalAddress(KPhysicalAddress phys_addr) {
|
||||
MESOSPHERE_ASSERT(this->IsLockedByCurrentThread());
|
||||
|
||||
return KMemoryLayout::IsLinearMappedPhysicalAddress(std::addressof(this->cached_physical_linear_region), phys_addr, this->cached_physical_linear_region);
|
||||
return KMemoryLayout::IsLinearMappedPhysicalAddress(this->cached_physical_linear_region, phys_addr);
|
||||
}
|
||||
|
||||
bool IsLinearMappedPhysicalAddress(KPhysicalAddress phys_addr, size_t size) {
|
||||
MESOSPHERE_ASSERT(this->IsLockedByCurrentThread());
|
||||
|
||||
return KMemoryLayout::IsLinearMappedPhysicalAddress(std::addressof(this->cached_physical_linear_region), phys_addr, size, this->cached_physical_linear_region);
|
||||
return KMemoryLayout::IsLinearMappedPhysicalAddress(this->cached_physical_linear_region, phys_addr, size);
|
||||
}
|
||||
|
||||
bool IsHeapPhysicalAddress(KPhysicalAddress phys_addr) {
|
||||
MESOSPHERE_ASSERT(this->IsLockedByCurrentThread());
|
||||
|
||||
return KMemoryLayout::IsHeapPhysicalAddress(std::addressof(this->cached_physical_heap_region), phys_addr, this->cached_physical_heap_region);
|
||||
}
|
||||
|
||||
bool IsHeapPhysicalAddressForFinalize(KPhysicalAddress phys_addr) {
|
||||
MESOSPHERE_ASSERT(!this->IsLockedByCurrentThread());
|
||||
|
||||
return KMemoryLayout::IsHeapPhysicalAddress(std::addressof(this->cached_physical_heap_region), phys_addr, this->cached_physical_heap_region);
|
||||
return KMemoryLayout::IsHeapPhysicalAddress(this->cached_physical_heap_region, phys_addr);
|
||||
}
|
||||
|
||||
bool IsHeapPhysicalAddress(KPhysicalAddress phys_addr, size_t size) {
|
||||
MESOSPHERE_ASSERT(this->IsLockedByCurrentThread());
|
||||
|
||||
return KMemoryLayout::IsHeapPhysicalAddress(std::addressof(this->cached_physical_heap_region), phys_addr, size, this->cached_physical_heap_region);
|
||||
return KMemoryLayout::IsHeapPhysicalAddress(this->cached_physical_heap_region, phys_addr, size);
|
||||
}
|
||||
|
||||
bool IsHeapPhysicalAddressForFinalize(KPhysicalAddress phys_addr) {
|
||||
MESOSPHERE_ASSERT(!this->IsLockedByCurrentThread());
|
||||
|
||||
return KMemoryLayout::IsHeapPhysicalAddress(this->cached_physical_heap_region, phys_addr);
|
||||
}
|
||||
|
||||
bool IsHeapVirtualAddress(KVirtualAddress virt_addr) {
|
||||
MESOSPHERE_ASSERT(this->IsLockedByCurrentThread());
|
||||
|
||||
return KMemoryLayout::IsHeapVirtualAddress(std::addressof(this->cached_virtual_heap_region), virt_addr, this->cached_virtual_heap_region);
|
||||
return KMemoryLayout::IsHeapVirtualAddress(this->cached_virtual_heap_region, virt_addr);
|
||||
}
|
||||
|
||||
bool IsHeapVirtualAddress(KVirtualAddress virt_addr, size_t size) {
|
||||
MESOSPHERE_ASSERT(this->IsLockedByCurrentThread());
|
||||
|
||||
return KMemoryLayout::IsHeapVirtualAddress(std::addressof(this->cached_virtual_heap_region), virt_addr, size, this->cached_virtual_heap_region);
|
||||
return KMemoryLayout::IsHeapVirtualAddress(this->cached_virtual_heap_region, virt_addr, size);
|
||||
}
|
||||
|
||||
bool ContainsPages(KProcessAddress addr, size_t num_pages) const {
|
||||
|
|
|
@ -183,6 +183,9 @@ namespace ams::kern {

#endif

template<typename T>
concept IsKTypedAddress = std::same_as<T, KPhysicalAddress> || std::same_as<T, KVirtualAddress> || std::same_as<T, KProcessAddress>;

template<typename T>
constexpr inline T Null = [] {
if constexpr (std::is_same<T, uintptr_t>::value) {
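The IsKTypedAddress concept added in this hunk is what lets KMemoryLayout's templated helpers accept only typed address wrappers. A small self-contained sketch of the same constraint pattern, with placeholder types rather than the kernel's:

    #include <concepts>
    #include <cstdint>

    struct PhysicalAddress { uintptr_t value; };
    struct VirtualAddress  { uintptr_t value; };

    /* Mirrors the shape of IsKTypedAddress: only the listed wrapper types satisfy the concept. */
    template<typename T>
    concept IsTypedAddress = std::same_as<T, PhysicalAddress> || std::same_as<T, VirtualAddress>;

    /* The requires-clause rejects raw integers at compile time, as in KMemoryLayout's helpers. */
    template<typename AddressType> requires IsTypedAddress<AddressType>
    constexpr uintptr_t GetInteger(AddressType address) {
        return address.value;
    }

    static_assert(GetInteger(PhysicalAddress{0x1000}) == 0x1000);
    /* GetInteger(0x1000ul) would fail to compile: uintptr_t does not satisfy IsTypedAddress. */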
@ -179,12 +179,12 @@ namespace ams::kern::board::nintendo::nx {

bool IsRegisterAccessibleToPrivileged(ams::svc::PhysicalAddress address) {
/* Find the region for the address. */
KMemoryRegionTree::const_iterator it = KMemoryLayout::FindContainingRegion(KPhysicalAddress(address));
if (AMS_LIKELY(it != KMemoryLayout::GetPhysicalMemoryRegionTree().end())) {
if (AMS_LIKELY(it->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController))) {
const KMemoryRegion *region = KMemoryLayout::Find(KPhysicalAddress(address));
if (AMS_LIKELY(region != nullptr)) {
if (AMS_LIKELY(region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController))) {
/* Get the offset within the region. */
const size_t offset = address - it->GetAddress();
MESOSPHERE_ABORT_UNLESS(offset < it->GetSize());
const size_t offset = address - region->GetAddress();
MESOSPHERE_ABORT_UNLESS(offset < region->GetSize());

/* Check the whitelist. */
if (AMS_LIKELY(CheckRegisterAllowedTable(McKernelRegisterWhitelist, offset))) {
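The rewritten check works from a plain region pointer: bail out if Find returned nothing, compute the register's offset inside the region, then consult a whitelist table. A standalone sketch of that flow follows; the one-bit-per-register table layout is an assumption for illustration, not the kernel's actual format.

    #include <cstddef>
    #include <cstdint>

    struct Region { uintptr_t address; size_t size; };

    /* One bit per 4-byte register; a set bit marks the register as accessible. */
    constexpr bool CheckRegisterAllowedTable(const uint32_t *table, size_t offset) {
        const size_t index = offset / sizeof(uint32_t);
        return (table[index / 32] >> (index % 32)) & 1;
    }

    bool IsRegisterAccessible(const Region *region, uintptr_t address, const uint32_t *whitelist) {
        if (region == nullptr) {
            return false;                                  /* the Find() lookup produced no region */
        }
        const size_t offset = address - region->address;   /* offset of the register within the region */
        if (offset >= region->size) {
            return false;                                  /* the kernel aborts here instead */
        }
        return CheckRegisterAllowedTable(whitelist, offset);
    }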
@ -198,21 +198,21 @@ namespace ams::kern::board::nintendo::nx {
|
|||
|
||||
bool IsRegisterAccessibleToUser(ams::svc::PhysicalAddress address) {
|
||||
/* Find the region for the address. */
|
||||
KMemoryRegionTree::const_iterator it = KMemoryLayout::FindContainingRegion(KPhysicalAddress(address));
|
||||
if (AMS_LIKELY(it != KMemoryLayout::GetPhysicalMemoryRegionTree().end())) {
|
||||
const KMemoryRegion *region = KMemoryLayout::Find(KPhysicalAddress(address));
|
||||
if (AMS_LIKELY(region != nullptr)) {
|
||||
/* The PMC is always allowed. */
|
||||
if (it->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_PowerManagementController)) {
|
||||
if (region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_PowerManagementController)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Memory controller is allowed if the register is whitelisted. */
|
||||
if (it->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController ) ||
|
||||
it->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController0) ||
|
||||
it->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController1))
|
||||
if (region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController ) ||
|
||||
region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController0) ||
|
||||
region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController1))
|
||||
{
|
||||
/* Get the offset within the region. */
|
||||
const size_t offset = address - it->GetAddress();
|
||||
MESOSPHERE_ABORT_UNLESS(offset < it->GetSize());
|
||||
const size_t offset = address - region->GetAddress();
|
||||
MESOSPHERE_ABORT_UNLESS(offset < region->GetSize());
|
||||
|
||||
/* Check the whitelist. */
|
||||
if (AMS_LIKELY(CheckRegisterAllowedTable(McUserRegisterWhitelist, offset))) {
|
||||
|
|
|
@ -98,7 +98,9 @@ namespace ams::kern::init {
KVirtualAddress start = util::AlignUp(GetInteger(address), alignof(T));

if (size > 0) {
MESOSPHERE_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(GetInteger(start) + size - 1)->IsDerivedFrom(KMemoryRegionType_KernelSlab));
const KMemoryRegion *region = KMemoryLayout::Find(start + size - 1);
MESOSPHERE_ABORT_UNLESS(region != nullptr);
MESOSPHERE_ABORT_UNLESS(region->IsDerivedFrom(KMemoryRegionType_KernelSlab));
T::InitializeSlabHeap(GetVoidPointer(start), size);
}

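The new form of the slab bound check makes the failure mode explicit: find the region holding the last byte, abort if there is none, then require it to derive from KernelSlab. Below is a simplified, self-contained version of the underlying alignment and range arithmetic; it checks containment within a single region rather than the kernel's derived-type test, so treat it as an approximation.

    #include <cstddef>
    #include <cstdint>

    constexpr uintptr_t AlignUp(uintptr_t value, size_t align) {
        return (value + align - 1) & ~static_cast<uintptr_t>(align - 1);   /* align must be a power of two */
    }

    struct Region {
        uintptr_t address;
        size_t size;
        constexpr uintptr_t GetLastAddress() const { return address + size - 1; }
    };

    /* True if the slab range, once aligned for T, still ends inside the given region. */
    template<typename T>
    constexpr bool SlabFitsInRegion(uintptr_t address, size_t size, const Region &region) {
        const uintptr_t start = AlignUp(address, alignof(T));
        return size == 0 || (region.address <= start && start + size - 1 <= region.GetLastAddress());
    }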
@ -17,55 +17,99 @@
|
|||
|
||||
namespace ams::kern {
|
||||
|
||||
namespace {
|
||||
|
||||
class KMemoryRegionAllocator {
|
||||
NON_COPYABLE(KMemoryRegionAllocator);
|
||||
NON_MOVEABLE(KMemoryRegionAllocator);
|
||||
public:
|
||||
static constexpr size_t MaxMemoryRegions = 1000;
|
||||
private:
|
||||
KMemoryRegion region_heap[MaxMemoryRegions];
|
||||
size_t num_regions;
|
||||
public:
|
||||
constexpr ALWAYS_INLINE KMemoryRegionAllocator() : region_heap(), num_regions() { /* ... */ }
|
||||
public:
|
||||
template<typename... Args>
|
||||
ALWAYS_INLINE KMemoryRegion *Allocate(Args&&... args) {
|
||||
/* Ensure we stay within the bounds of our heap. */
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(this->num_regions < MaxMemoryRegions);
|
||||
|
||||
/* Create the new region. */
|
||||
KMemoryRegion *region = std::addressof(this->region_heap[this->num_regions++]);
|
||||
new (region) KMemoryRegion(std::forward<Args>(args)...);
|
||||
|
||||
return region;
|
||||
|
||||
return &this->region_heap[this->num_regions++];
|
||||
}
|
||||
};
|
||||
|
||||
constinit KMemoryRegionAllocator g_memory_region_allocator;
|
||||
|
||||
template<typename... Args>
|
||||
ALWAYS_INLINE KMemoryRegion *AllocateRegion(Args&&... args) {
|
||||
return g_memory_region_allocator.Allocate(std::forward<Args>(args)...);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
void KMemoryRegionTree::InsertDirectly(uintptr_t address, size_t size, u32 attr, u32 type_id) {
|
||||
this->insert(*AllocateRegion(address, size, attr, type_id));
|
||||
}
|
||||
|
||||
bool KMemoryRegionTree::Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) {
|
||||
/* Locate the memory region that contains the address. */
|
||||
auto it = this->FindContainingRegion(address);
|
||||
KMemoryRegion *found = this->FindModifiable(address);
|
||||
|
||||
/* We require that the old attr is correct. */
|
||||
if (it->GetAttributes() != old_attr) {
|
||||
if (found->GetAttributes() != old_attr) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/* We further require that the region can be split from the old region. */
|
||||
const uintptr_t inserted_region_end = address + size;
|
||||
const uintptr_t inserted_region_last = inserted_region_end - 1;
|
||||
if (it->GetLastAddress() < inserted_region_last) {
|
||||
if (found->GetLastAddress() < inserted_region_last) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Further, we require that the type id is a valid transformation. */
|
||||
if (!it->CanDerive(type_id)) {
|
||||
if (!found->CanDerive(type_id)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Cache information from the region before we remove it. */
|
||||
KMemoryRegion *cur_region = std::addressof(*it);
|
||||
const uintptr_t old_address = it->GetAddress();
|
||||
const size_t old_size = it->GetSize();
|
||||
const uintptr_t old_address = found->GetAddress();
|
||||
const size_t old_size = found->GetSize();
|
||||
const uintptr_t old_end = old_address + old_size;
|
||||
const uintptr_t old_last = old_end - 1;
|
||||
const uintptr_t old_pair = it->GetPairAddress();
|
||||
const u32 old_type = it->GetType();
|
||||
const uintptr_t old_pair = found->GetPairAddress();
|
||||
const u32 old_type = found->GetType();
|
||||
|
||||
/* Erase the existing region from the tree. */
|
||||
this->erase(it);
|
||||
this->erase(this->iterator_to(*found));
|
||||
|
||||
/* If we need to insert a region before the region, do so. */
|
||||
if (old_address != address) {
|
||||
new (cur_region) KMemoryRegion(old_address, address - old_address, old_pair, old_attr, old_type);
|
||||
this->insert(*cur_region);
|
||||
cur_region = KMemoryLayout::GetMemoryRegionAllocator().Allocate();
|
||||
}
|
||||
|
||||
/* Insert a new region. */
|
||||
/* Insert the new region into the tree. */
|
||||
const uintptr_t new_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (address - old_address) : old_pair;
|
||||
new (cur_region) KMemoryRegion(address, size, new_pair, new_attr, type_id);
|
||||
this->insert(*cur_region);
|
||||
if (old_address == address) {
|
||||
/* Reuse the old object for the new region, if we can. */
|
||||
found->Reset(address, size, new_pair, new_attr, type_id);
|
||||
this->insert(*found);
|
||||
} else {
|
||||
/* If we can't re-use, adjust the old region. */
|
||||
found->Reset(old_address, address - old_address, old_pair, old_attr, old_type);
|
||||
this->insert(*found);
|
||||
|
||||
/* Insert a new region for the split. */
|
||||
this->insert(*AllocateRegion(address, size, new_pair, new_attr, type_id));
|
||||
}
|
||||
|
||||
/* If we need to insert a region after the region, do so. */
|
||||
if (old_last != inserted_region_last) {
|
||||
const uintptr_t after_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (inserted_region_end - old_address) : old_pair;
|
||||
this->insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(inserted_region_end, old_end - inserted_region_end, after_pair, old_attr, old_type));
|
||||
this->insert(*AllocateRegion(inserted_region_end, old_end - inserted_region_end, after_pair, old_attr, old_type));
|
||||
}
|
||||
|
||||
return true;
|
||||
|
@ -96,16 +140,11 @@ namespace ams::kern {
continue;
}

/* Locate the candidate region, and ensure it fits. */
const KMemoryRegion *candidate_region = std::addressof(*this->FindContainingRegion(candidate));
if (candidate_last > candidate_region->GetLastAddress()) {
/* Locate the candidate region, and ensure it fits and has the correct type id. */
if (const auto &candidate_region = *this->Find(candidate); !(candidate_last <= candidate_region.GetLastAddress() && candidate_region.GetType() == type_id)) {
continue;
}

/* Ensure that the region has the correct type id. */
if (candidate_region->GetType() != type_id)
continue;

return candidate;
}
}
@ -117,17 +156,15 @@ namespace ams::kern {

/* Initialize linear trees. */
for (auto &region : GetPhysicalMemoryRegionTree()) {
if (!region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
continue;
if (region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
GetPhysicalLinearMemoryRegionTree().InsertDirectly(region.GetAddress(), region.GetSize(), region.GetAttributes(), region.GetType());
}
GetPhysicalLinearMemoryRegionTree().insert(*GetMemoryRegionAllocator().Create(region.GetAddress(), region.GetSize(), region.GetAttributes(), region.GetType()));
}

for (auto &region : GetVirtualMemoryRegionTree()) {
if (!region.IsDerivedFrom(KMemoryRegionType_Dram)) {
continue;
if (region.IsDerivedFrom(KMemoryRegionType_Dram)) {
GetVirtualLinearMemoryRegionTree().InsertDirectly(region.GetAddress(), region.GetSize(), region.GetAttributes(), region.GetType());
}
GetVirtualLinearMemoryRegionTree().insert(*GetMemoryRegionAllocator().Create(region.GetAddress(), region.GetSize(), region.GetAttributes(), region.GetType()));
}
}

@ -152,13 +189,13 @@ namespace ams::kern {
|
|||
const uintptr_t candidate_end = candidate_start + CoreLocalRegionSizeWithGuards;
|
||||
const uintptr_t candidate_last = candidate_end - 1;
|
||||
|
||||
const KMemoryRegion *containing_region = std::addressof(*KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(candidate_start));
|
||||
const auto &containing_region = *KMemoryLayout::GetVirtualMemoryRegionTree().Find(candidate_start);
|
||||
|
||||
if (candidate_last > containing_region->GetLastAddress()) {
|
||||
if (candidate_last > containing_region.GetLastAddress()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (containing_region->GetType() != KMemoryRegionType_None) {
|
||||
if (containing_region.GetType() != KMemoryRegionType_None) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -166,11 +203,11 @@ namespace ams::kern {
|
|||
continue;
|
||||
}
|
||||
|
||||
if (containing_region->GetAddress() > util::AlignDown(candidate_start, CoreLocalRegionBoundsAlign)) {
|
||||
if (containing_region.GetAddress() > util::AlignDown(candidate_start, CoreLocalRegionBoundsAlign)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (util::AlignUp(candidate_last, CoreLocalRegionBoundsAlign) - 1 > containing_region->GetLastAddress()) {
|
||||
if (util::AlignUp(candidate_last, CoreLocalRegionBoundsAlign) - 1 > containing_region.GetLastAddress()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -182,7 +219,9 @@ namespace ams::kern {
void InsertPoolPartitionRegionIntoBothTrees(size_t start, size_t size, KMemoryRegionType phys_type, KMemoryRegionType virt_type, u32 &cur_attr) {
const u32 attr = cur_attr++;
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(start, size, phys_type, attr));
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(phys_type, attr)->GetPairAddress(), size, virt_type, attr));
const KMemoryRegion *phys = KMemoryLayout::GetPhysicalMemoryRegionTree().FindByTypeAndAttribute(phys_type, attr);
MESOSPHERE_INIT_ABORT_UNLESS(phys != nullptr);
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(phys->GetPairAddress(), size, virt_type, attr));
}

}
@ -237,11 +276,16 @@ namespace ams::kern {
const size_t unsafe_system_pool_min_size = KSystemControl::Init::GetMinimumNonSecureSystemPoolSize();

/* Find the start of the kernel DRAM region. */
const uintptr_t kernel_dram_start = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_DramKernel)->GetAddress();
const KMemoryRegion *kernel_dram_region = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_DramKernel);
MESOSPHERE_INIT_ABORT_UNLESS(kernel_dram_region != nullptr);

const uintptr_t kernel_dram_start = kernel_dram_region->GetAddress();
MESOSPHERE_INIT_ABORT_UNLESS(util::IsAligned(kernel_dram_start, CarveoutAlignment));

/* Find the start of the pool partitions region. */
const uintptr_t pool_partitions_start = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_DramPoolPartition)->GetAddress();
const KMemoryRegion *pool_partitions_region = KMemoryLayout::GetPhysicalMemoryRegionTree().FindByTypeAndAttribute(KMemoryRegionType_DramPoolPartition, 0);
MESOSPHERE_INIT_ABORT_UNLESS(pool_partitions_region != nullptr);
const uintptr_t pool_partitions_start = pool_partitions_region->GetAddress();

/* Decide on starting addresses for our pools. */
const uintptr_t application_pool_start = pool_end - application_pool_size;
@ -1582,28 +1582,30 @@ namespace ams::kern {
|
|||
const size_t region_num_pages = region_size / PageSize;
|
||||
|
||||
/* Locate the memory region. */
|
||||
auto region_it = KMemoryLayout::FindContainingRegion(phys_addr);
|
||||
const auto end_it = KMemoryLayout::GetEnd(phys_addr);
|
||||
R_UNLESS(region_it != end_it, svc::ResultInvalidAddress());
|
||||
const KMemoryRegion *region = KMemoryLayout::Find(phys_addr);
|
||||
R_UNLESS(region != nullptr, svc::ResultInvalidAddress());
|
||||
|
||||
MESOSPHERE_ASSERT(region_it->Contains(GetInteger(phys_addr)));
|
||||
MESOSPHERE_ASSERT(region->Contains(GetInteger(phys_addr)));
|
||||
|
||||
/* Ensure that the region is mappable. */
|
||||
const bool is_rw = perm == KMemoryPermission_UserReadWrite;
|
||||
do {
|
||||
while (true) {
|
||||
/* Check that the region exists. */
|
||||
R_UNLESS(region != nullptr, svc::ResultInvalidAddress());
|
||||
|
||||
/* Check the region attributes. */
|
||||
R_UNLESS(!region_it->IsDerivedFrom(KMemoryRegionType_Dram), svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region_it->HasTypeAttribute(KMemoryRegionAttr_UserReadOnly) || !is_rw, svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region_it->HasTypeAttribute(KMemoryRegionAttr_NoUserMap), svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region->IsDerivedFrom(KMemoryRegionType_Dram), svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region->HasTypeAttribute(KMemoryRegionAttr_UserReadOnly) || !is_rw, svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region->HasTypeAttribute(KMemoryRegionAttr_NoUserMap), svc::ResultInvalidAddress());
|
||||
|
||||
/* Check if we're done. */
|
||||
if (GetInteger(last) <= region_it->GetLastAddress()) {
|
||||
if (GetInteger(last) <= region->GetLastAddress()) {
|
||||
break;
|
||||
}
|
||||
|
||||
/* Advance. */
|
||||
region_it++;
|
||||
} while (region_it != end_it);
|
||||
region = region->GetNext();
|
||||
};
|
||||
|
||||
/* Lock the table. */
|
||||
KScopedLightLock lk(this->general_lock);
|
||||
|
@ -1660,18 +1662,17 @@ namespace ams::kern {
|
|||
const size_t region_num_pages = region_size / PageSize;
|
||||
|
||||
/* Locate the memory region. */
|
||||
auto region_it = KMemoryLayout::FindContainingRegion(phys_addr);
|
||||
const auto end_it = KMemoryLayout::GetEnd(phys_addr);
|
||||
R_UNLESS(region_it != end_it, svc::ResultInvalidAddress());
|
||||
const KMemoryRegion *region = KMemoryLayout::Find(phys_addr);
|
||||
R_UNLESS(region != nullptr, svc::ResultInvalidAddress());
|
||||
|
||||
MESOSPHERE_ASSERT(region_it->Contains(GetInteger(phys_addr)));
|
||||
R_UNLESS(GetInteger(last) <= region_it->GetLastAddress(), svc::ResultInvalidAddress());
|
||||
MESOSPHERE_ASSERT(region->Contains(GetInteger(phys_addr)));
|
||||
R_UNLESS(GetInteger(last) <= region->GetLastAddress(), svc::ResultInvalidAddress());
|
||||
|
||||
/* Check the region attributes. */
|
||||
const bool is_rw = perm == KMemoryPermission_UserReadWrite;
|
||||
R_UNLESS( region_it->IsDerivedFrom(KMemoryRegionType_Dram), svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region_it->HasTypeAttribute(KMemoryRegionAttr_NoUserMap), svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region_it->HasTypeAttribute(KMemoryRegionAttr_UserReadOnly) || !is_rw, svc::ResultInvalidAddress());
|
||||
R_UNLESS( region->IsDerivedFrom(KMemoryRegionType_Dram), svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region->HasTypeAttribute(KMemoryRegionAttr_NoUserMap), svc::ResultInvalidAddress());
|
||||
R_UNLESS(!region->HasTypeAttribute(KMemoryRegionAttr_UserReadOnly) || !is_rw, svc::ResultInvalidAddress());
|
||||
|
||||
/* Lock the table. */
|
||||
KScopedLightLock lk(this->general_lock);
|
||||
|
@ -1716,12 +1717,11 @@ namespace ams::kern {

Result KPageTableBase::MapRegion(KMemoryRegionType region_type, KMemoryPermission perm) {
/* Get the memory region. */
auto &tree = KMemoryLayout::GetPhysicalMemoryRegionTree();
auto it = tree.TryFindFirstDerivedRegion(region_type);
R_UNLESS(it != tree.end(), svc::ResultOutOfRange());
const KMemoryRegion *region = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstDerived(region_type);
R_UNLESS(region != nullptr, svc::ResultOutOfRange());

/* Map the region. */
R_TRY_CATCH(this->MapStatic(it->GetAddress(), it->GetSize(), perm)) {
R_TRY_CATCH(this->MapStatic(region->GetAddress(), region->GetSize(), perm)) {
R_CONVERT(svc::ResultInvalidAddress, svc::ResultOutOfRange())
} R_END_TRY_CATCH;

@ -139,7 +139,7 @@ namespace ams::kern {
PrintMemoryRegion(" Misc", KMemoryLayout::GetKernelMiscRegionExtents());
PrintMemoryRegion(" Slab", KMemoryLayout::GetKernelSlabRegionExtents());
PrintMemoryRegion(" CoreLocalRegion", KMemoryLayout::GetCoreLocalRegion());
PrintMemoryRegion(" LinearRegion", KMemoryLayout::GetLinearRegionExtents());
PrintMemoryRegion(" LinearRegion", KMemoryLayout::GetLinearRegionVirtualExtents());
MESOSPHERE_LOG("\n");

MESOSPHERE_LOG("Physical Memory Layout\n");
@ -80,21 +80,14 @@ namespace ams::kern::svc {
|
|||
R_UNLESS(phys_addr < PageSize, svc::ResultNotFound());
|
||||
|
||||
/* Try to find the memory region. */
|
||||
const KMemoryRegion *region;
|
||||
switch (static_cast<ams::svc::MemoryRegionType>(phys_addr)) {
|
||||
case ams::svc::MemoryRegionType_KernelTraceBuffer:
|
||||
region = KMemoryLayout::TryGetKernelTraceBufferRegion();
|
||||
break;
|
||||
case ams::svc::MemoryRegionType_OnMemoryBootImage:
|
||||
region = KMemoryLayout::TryGetOnMemoryBootImageRegion();
|
||||
break;
|
||||
case ams::svc::MemoryRegionType_DTB:
|
||||
region = KMemoryLayout::TryGetDTBRegion();
|
||||
break;
|
||||
default:
|
||||
region = nullptr;
|
||||
break;
|
||||
}
|
||||
const KMemoryRegion * const region = [] ALWAYS_INLINE_LAMBDA (ams::svc::MemoryRegionType type) -> const KMemoryRegion * {
|
||||
switch (type) {
|
||||
case ams::svc::MemoryRegionType_KernelTraceBuffer: return KMemoryLayout::GetPhysicalKernelTraceBufferRegion();
|
||||
case ams::svc::MemoryRegionType_OnMemoryBootImage: return KMemoryLayout::GetPhysicalOnMemoryBootImageRegion();
|
||||
case ams::svc::MemoryRegionType_DTB: return KMemoryLayout::GetPhysicalDTBRegion();
|
||||
default: return nullptr;
|
||||
}
|
||||
}(static_cast<ams::svc::MemoryRegionType>(phys_addr));
|
||||
|
||||
/* Ensure that we found the region. */
|
||||
R_UNLESS(region != nullptr, svc::ResultNotFound());
|
||||
|
|
|
@ -114,10 +114,10 @@ namespace ams::kern::init {
InitializeSlabResourceCounts();

/* Insert the root region for the virtual memory tree, from which all other regions will derive. */
KMemoryLayout::GetVirtualMemoryRegionTree().insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(KernelVirtualAddressSpaceBase, KernelVirtualAddressSpaceSize, 0, 0));
KMemoryLayout::GetVirtualMemoryRegionTree().InsertDirectly(KernelVirtualAddressSpaceBase, KernelVirtualAddressSpaceSize);

/* Insert the root region for the physical memory tree, from which all other regions will derive. */
KMemoryLayout::GetPhysicalMemoryRegionTree().insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(KernelPhysicalAddressSpaceBase, KernelPhysicalAddressSpaceSize, 0, 0));
KMemoryLayout::GetPhysicalMemoryRegionTree().InsertDirectly(KernelPhysicalAddressSpaceBase, KernelPhysicalAddressSpaceSize);

/* Save start and end for ease of use. */
const uintptr_t code_start_virt_addr = reinterpret_cast<uintptr_t>(_start);
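Both trees now start from a single root region spanning the whole address space, and later Insert calls carve typed sub-regions out of whatever untyped region contains them. A toy sketch of that seed-and-split behaviour over a plain interval map follows; the types are simplified and omit the kernel's intrusive tree and pair-address bookkeeping.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <iterator>
    #include <map>

    struct Region { size_t size; uint32_t type; };
    using Tree = std::map<uintptr_t, Region>;   /* keyed by base address */

    /* Carve [address, address + size) of the given type out of the untyped region containing it. */
    bool Insert(Tree &tree, uintptr_t address, size_t size, uint32_t type) {
        auto it = std::prev(tree.upper_bound(address));       /* containing region; assumes the root exists */
        const uintptr_t old_base = it->first;
        const Region    old      = it->second;
        if (address + size > old_base + old.size) {
            return false;                                      /* must fit inside the old region */
        }
        tree.erase(it);
        if (old_base != address) {                             /* leading remainder keeps the old type */
            tree.emplace(old_base, Region{address - old_base, old.type});
        }
        tree.emplace(address, Region{size, type});             /* the newly typed region */
        if (address + size != old_base + old.size) {           /* trailing remainder keeps the old type */
            tree.emplace(address + size, Region{old_base + old.size - (address + size), old.type});
        }
        return true;
    }

    int main() {
        Tree virt;
        virt.emplace(0x80000000u, Region{0x40000000u, 0});     /* root region, as with InsertDirectly */
        assert(Insert(virt, 0x90000000u, 0x1000000u, 1));      /* carve a typed sub-region out of it */
        assert(virt.size() == 3);                              /* before / inserted / after */
    }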
@ -294,7 +294,10 @@ namespace ams::kern::init {
const uintptr_t region_virt_addr = region.GetAddress() + linear_region_phys_to_virt_diff;
MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(region_virt_addr, region.GetSize(), GetTypeForVirtualLinearMapping(region.GetType())));
region.SetPairAddress(region_virt_addr);
KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(region_virt_addr)->SetPairAddress(region.GetAddress());

KMemoryRegion *virt_region = KMemoryLayout::GetVirtualMemoryRegionTree().FindModifiable(region_virt_addr);
MESOSPHERE_INIT_ABORT_UNLESS(virt_region != nullptr);
virt_region->SetPairAddress(region.GetAddress());
}

/* Map the last block, which we may have skipped. */