diff --git a/libraries/libmesosphere/include/mesosphere/kern_k_memory_layout.hpp b/libraries/libmesosphere/include/mesosphere/kern_k_memory_layout.hpp
index 654bc88a9..f8bfcb59b 100644
--- a/libraries/libmesosphere/include/mesosphere/kern_k_memory_layout.hpp
+++ b/libraries/libmesosphere/include/mesosphere/kern_k_memory_layout.hpp
@@ -112,17 +112,17 @@ namespace ams::kern {
         }
     }
 
-    class KMemoryBlock : public util::IntrusiveRedBlackTreeBaseNode<KMemoryBlock> {
-        NON_COPYABLE(KMemoryBlock);
-        NON_MOVEABLE(KMemoryBlock);
+    class KMemoryRegion : public util::IntrusiveRedBlackTreeBaseNode<KMemoryRegion> {
+        NON_COPYABLE(KMemoryRegion);
+        NON_MOVEABLE(KMemoryRegion);
         private:
             uintptr_t address;
             uintptr_t pair_address;
-            size_t block_size;
+            size_t region_size;
             u32 attributes;
             u32 type_id;
         public:
-            static constexpr ALWAYS_INLINE int Compare(const KMemoryBlock &lhs, const KMemoryBlock &rhs) {
+            static constexpr ALWAYS_INLINE int Compare(const KMemoryRegion &lhs, const KMemoryRegion &rhs) {
                 if (lhs.GetAddress() < rhs.GetAddress()) {
                     return -1;
                 } else if (lhs.GetLastAddress() > rhs.GetLastAddress()) {
@@ -132,13 +132,13 @@ namespace ams::kern {
                 }
             }
         public:
-            constexpr ALWAYS_INLINE KMemoryBlock() : address(0), pair_address(0), block_size(0), attributes(0), type_id(0) { /* ... */ }
-            constexpr ALWAYS_INLINE KMemoryBlock(uintptr_t a, size_t bl, uintptr_t p, u32 r, u32 t) :
-                address(a), pair_address(p), block_size(bl), attributes(r), type_id(t)
+            constexpr ALWAYS_INLINE KMemoryRegion() : address(0), pair_address(0), region_size(0), attributes(0), type_id(0) { /* ... */ }
+            constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, uintptr_t p, u32 r, u32 t) :
+                address(a), pair_address(p), region_size(rs), attributes(r), type_id(t)
             {
                 /* ... */
             }
-            constexpr ALWAYS_INLINE KMemoryBlock(uintptr_t a, size_t bl, u32 r, u32 t) : KMemoryBlock(a, bl, std::numeric_limits<uintptr_t>::max(), r, t) { /* ... */ }
+            constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, u32 r, u32 t) : KMemoryRegion(a, rs, std::numeric_limits<uintptr_t>::max(), r, t) { /* ... */ }
 
             constexpr ALWAYS_INLINE uintptr_t GetAddress() const {
                 return this->address;
@@ -149,7 +149,7 @@ namespace ams::kern {
             }
 
             constexpr ALWAYS_INLINE size_t GetSize() const {
-                return this->block_size;
+                return this->region_size;
             }
 
             constexpr ALWAYS_INLINE uintptr_t GetEndAddress() const {
@@ -197,16 +197,16 @@ namespace ams::kern {
                 this->type_id |= attr;
             }
     };
-    static_assert(std::is_trivially_destructible<KMemoryBlock>::value);
+    static_assert(std::is_trivially_destructible<KMemoryRegion>::value);
 
-    class KMemoryBlockTree {
+    class KMemoryRegionTree {
        public:
            struct DerivedRegionExtents {
-                const KMemoryBlock *first_block;
-                const KMemoryBlock *last_block;
+                const KMemoryRegion *first_region;
+                const KMemoryRegion *last_region;
            };
        private:
-            using TreeType = util::IntrusiveRedBlackTreeBaseTraits<KMemoryBlock>::TreeType;
+            using TreeType = util::IntrusiveRedBlackTreeBaseTraits<KMemoryRegion>::TreeType;
            using value_type = TreeType::value_type;
            using size_type = TreeType::size_type;
            using difference_type = TreeType::difference_type;
@@ -219,17 +219,17 @@ namespace ams::kern {
        private:
            TreeType tree;
        public:
-            constexpr ALWAYS_INLINE KMemoryBlockTree() : tree() { /* ... */ }
+            constexpr ALWAYS_INLINE KMemoryRegionTree() : tree() { /* ... */ }
        public:
-            iterator FindContainingBlock(uintptr_t address) {
-                auto it = this->find(KMemoryBlock(address, 1, 0, 0));
+            iterator FindContainingRegion(uintptr_t address) {
+                auto it = this->find(KMemoryRegion(address, 1, 0, 0));
                MESOSPHERE_INIT_ABORT_UNLESS(it != this->end());
                MESOSPHERE_INIT_ABORT_UNLESS(it->Contains(address));
 
                return it;
            }
 
-            iterator FindFirstBlockByTypeAttr(u32 type_id, u32 attr = 0) {
+            iterator FindFirstRegionByTypeAttr(u32 type_id, u32 attr = 0) {
                for (auto it = this->begin(); it != this->end(); it++) {
                    if (it->GetType() == type_id && it->GetAttributes() == attr) {
                        return it;
@@ -238,7 +238,7 @@ namespace ams::kern {
                MESOSPHERE_INIT_ABORT();
            }
 
-            iterator FindFirstBlockByType(u32 type_id) {
+            iterator FindFirstRegionByType(u32 type_id) {
                for (auto it = this->begin(); it != this->end(); it++) {
                    if (it->GetType() == type_id) {
                        return it;
@@ -247,7 +247,7 @@ namespace ams::kern {
                MESOSPHERE_INIT_ABORT();
            }
 
-            iterator FindFirstDerivedBlock(u32 type_id) {
+            iterator FindFirstDerivedRegion(u32 type_id) {
                for (auto it = this->begin(); it != this->end(); it++) {
                    if (it->IsDerivedFrom(type_id)) {
                        return it;
@@ -258,19 +258,19 @@ namespace ams::kern {
 
            DerivedRegionExtents GetDerivedRegionExtents(u32 type_id) {
-                DerivedRegionExtents extents = { .first_block = nullptr, .last_block = nullptr };
+                DerivedRegionExtents extents = { .first_region = nullptr, .last_region = nullptr };
 
                for (auto it = this->cbegin(); it != this->cend(); it++) {
                    if (it->IsDerivedFrom(type_id)) {
-                        if (extents.first_block == nullptr) {
-                            extents.first_block = std::addressof(*it);
+                        if (extents.first_region == nullptr) {
+                            extents.first_region = std::addressof(*it);
                        }
-                        extents.last_block = std::addressof(*it);
+                        extents.last_region = std::addressof(*it);
                    }
                }
 
-                MESOSPHERE_INIT_ABORT_UNLESS(extents.first_block != nullptr);
-                MESOSPHERE_INIT_ABORT_UNLESS(extents.last_block != nullptr);
+                MESOSPHERE_INIT_ABORT_UNLESS(extents.first_region != nullptr);
+                MESOSPHERE_INIT_ABORT_UNLESS(extents.last_region != nullptr);
 
                return extents;
            }
@@ -354,30 +354,30 @@ namespace ams::kern {
        }
    };
 
-    class KMemoryBlockAllocator {
-        NON_COPYABLE(KMemoryBlockAllocator);
-        NON_MOVEABLE(KMemoryBlockAllocator);
+    class KMemoryRegionAllocator {
+        NON_COPYABLE(KMemoryRegionAllocator);
+        NON_MOVEABLE(KMemoryRegionAllocator);
        public:
-            static constexpr size_t MaxMemoryBlocks = 1000;
+            static constexpr size_t MaxMemoryRegions = 1000;
            friend class KMemoryLayout;
        private:
-            KMemoryBlock block_heap[MaxMemoryBlocks];
-            size_t num_blocks;
+            KMemoryRegion region_heap[MaxMemoryRegions];
+            size_t num_regions;
        private:
-            constexpr ALWAYS_INLINE KMemoryBlockAllocator() : block_heap(), num_blocks() { /* ... */ }
+            constexpr ALWAYS_INLINE KMemoryRegionAllocator() : region_heap(), num_regions() { /* ... */ }
        public:
-            ALWAYS_INLINE KMemoryBlock *Allocate() {
+            ALWAYS_INLINE KMemoryRegion *Allocate() {
                /* Ensure we stay within the bounds of our heap. */
-                MESOSPHERE_INIT_ABORT_UNLESS(this->num_blocks < MaxMemoryBlocks);
+                MESOSPHERE_INIT_ABORT_UNLESS(this->num_regions < MaxMemoryRegions);
 
-                return &this->block_heap[this->num_blocks++];
+                return &this->region_heap[this->num_regions++];
            }
 
            template<typename... Args>
-            ALWAYS_INLINE KMemoryBlock *Create(Args&&... args) {
-                KMemoryBlock *block = this->Allocate();
-                new (block) KMemoryBlock(std::forward<Args>(args)...);
-                return block;
+            ALWAYS_INLINE KMemoryRegion *Create(Args&&... args) {
+                KMemoryRegion *region = this->Allocate();
+                new (region) KMemoryRegion(std::forward<Args>(args)...);
+                return region;
            }
    };
 
@@ -385,17 +385,17 @@ namespace ams::kern {
        private:
            static /* constinit */ inline uintptr_t s_linear_phys_to_virt_diff;
            static /* constinit */ inline uintptr_t s_linear_virt_to_phys_diff;
-            static /* constinit */ inline KMemoryBlockAllocator s_block_allocator;
-            static /* constinit */ inline KMemoryBlockTree s_virtual_tree;
-            static /* constinit */ inline KMemoryBlockTree s_physical_tree;
-            static /* constinit */ inline KMemoryBlockTree s_virtual_linear_tree;
-            static /* constinit */ inline KMemoryBlockTree s_physical_linear_tree;
+            static /* constinit */ inline KMemoryRegionAllocator s_region_allocator;
+            static /* constinit */ inline KMemoryRegionTree s_virtual_tree;
+            static /* constinit */ inline KMemoryRegionTree s_physical_tree;
+            static /* constinit */ inline KMemoryRegionTree s_virtual_linear_tree;
+            static /* constinit */ inline KMemoryRegionTree s_physical_linear_tree;
        public:
-            static ALWAYS_INLINE KMemoryBlockAllocator &GetMemoryBlockAllocator() { return s_block_allocator; }
-            static ALWAYS_INLINE KMemoryBlockTree &GetVirtualMemoryBlockTree() { return s_virtual_tree; }
-            static ALWAYS_INLINE KMemoryBlockTree &GetPhysicalMemoryBlockTree() { return s_physical_tree; }
-            static ALWAYS_INLINE KMemoryBlockTree &GetVirtualLinearMemoryBlockTree() { return s_virtual_linear_tree; }
-            static ALWAYS_INLINE KMemoryBlockTree &GetPhysicalLinearMemoryBlockTree() { return s_physical_linear_tree; }
+            static ALWAYS_INLINE KMemoryRegionAllocator &GetMemoryRegionAllocator() { return s_region_allocator; }
+            static ALWAYS_INLINE KMemoryRegionTree &GetVirtualMemoryRegionTree() { return s_virtual_tree; }
+            static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalMemoryRegionTree() { return s_physical_tree; }
+            static ALWAYS_INLINE KMemoryRegionTree &GetVirtualLinearMemoryRegionTree() { return s_virtual_linear_tree; }
+            static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalLinearMemoryRegionTree() { return s_physical_linear_tree; }
 
            static ALWAYS_INLINE KVirtualAddress GetLinearVirtualAddress(KPhysicalAddress address) {
                return GetInteger(address) + s_linear_phys_to_virt_diff;
@@ -406,46 +406,46 @@ namespace ams::kern {
            }
 
            static NOINLINE KVirtualAddress GetMainStackTopAddress(s32 core_id) {
-                return GetVirtualMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_KernelMiscMainStack, static_cast<u32>(core_id))->GetEndAddress();
+                return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscMainStack, static_cast<u32>(core_id))->GetEndAddress();
            }
 
            static NOINLINE KVirtualAddress GetIdleStackTopAddress(s32 core_id) {
-                return GetVirtualMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_KernelMiscIdleStack, static_cast<u32>(core_id))->GetEndAddress();
+                return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscIdleStack, static_cast<u32>(core_id))->GetEndAddress();
            }
 
            static NOINLINE KVirtualAddress GetExceptionStackBottomAddress(s32 core_id) {
-                return GetVirtualMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_KernelMiscExceptionStack, static_cast<u32>(core_id))->GetAddress();
+                return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscExceptionStack, static_cast<u32>(core_id))->GetAddress();
            }
 
            static NOINLINE KVirtualAddress GetSlabRegionAddress() {
-                return GetVirtualMemoryBlockTree().FindFirstBlockByType(KMemoryRegionType_KernelSlab)->GetAddress();
+                return GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_KernelSlab)->GetAddress();
            }
 
            static NOINLINE KVirtualAddress GetCoreLocalRegionAddress() {
-                return GetVirtualMemoryBlockTree().FindFirstBlockByType(KMemoryRegionType_CoreLocal)->GetAddress();
+                return GetVirtualMemoryRegionTree().FindFirstRegionByType(KMemoryRegionType_CoreLocal)->GetAddress();
            }
 
            static NOINLINE KVirtualAddress GetInterruptDistributorAddress() {
-                return GetPhysicalMemoryBlockTree().FindFirstDerivedBlock(KMemoryRegionType_InterruptDistributor)->GetPairAddress();
+                return GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_InterruptDistributor)->GetPairAddress();
            }
 
            static NOINLINE KVirtualAddress GetInterruptCpuInterfaceAddress() {
-                return GetPhysicalMemoryBlockTree().FindFirstDerivedBlock(KMemoryRegionType_InterruptCpuInterface)->GetPairAddress();
+                return GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_InterruptCpuInterface)->GetPairAddress();
            }
 
-            static void InitializeLinearMemoryBlockTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start);
+            static void InitializeLinearMemoryRegionTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start);
    };
 
 
    namespace init {
 
        /* These should be generic, regardless of board. */
-        void SetupCoreLocalRegionMemoryBlocks(KInitialPageTable &page_table, KInitialPageAllocator &page_allocator);
-        void SetupPoolPartitionMemoryBlocks();
+        void SetupCoreLocalRegionMemoryRegions(KInitialPageTable &page_table, KInitialPageAllocator &page_allocator);
+        void SetupPoolPartitionMemoryRegions();
 
        /* These may be implemented in a board-specific manner. */
-        void SetupDevicePhysicalMemoryBlocks();
-        void SetupDramPhysicalMemoryBlocks();
+        void SetupDevicePhysicalMemoryRegions();
+        void SetupDramPhysicalMemoryRegions();
 
    }
diff --git a/libraries/libmesosphere/source/board/nintendo/switch/kern_k_memory_layout.board.nintendo_switch.cpp b/libraries/libmesosphere/source/board/nintendo/switch/kern_k_memory_layout.board.nintendo_switch.cpp
index 5dd3096a7..816de9a8f 100644
--- a/libraries/libmesosphere/source/board/nintendo/switch/kern_k_memory_layout.board.nintendo_switch.cpp
+++ b/libraries/libmesosphere/source/board/nintendo/switch/kern_k_memory_layout.board.nintendo_switch.cpp
@@ -26,29 +26,29 @@ namespace ams::kern {
 
    namespace init {
 
-        void SetupDevicePhysicalMemoryBlocks() {
+        void SetupDevicePhysicalMemoryRegions() {
            /* TODO: Give these constexpr defines somewhere? */
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x70006000, 0x40,    KMemoryRegionType_Uart                       | KMemoryRegionAttr_ShouldKernelMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x70019000, 0x1000,  KMemoryRegionType_MemoryController           | KMemoryRegionAttr_NoUserMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x7001C000, 0x1000,  KMemoryRegionType_MemoryController0          | KMemoryRegionAttr_NoUserMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x7001D000, 0x1000,  KMemoryRegionType_MemoryController1          | KMemoryRegionAttr_NoUserMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x7000E000, 0x400,   KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x7000E400, 0xC00,   KMemoryRegionType_PowerManagementController  | KMemoryRegionAttr_NoUserMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x50040000, 0x1000,  KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x50041000, 0x1000,  KMemoryRegionType_InterruptDistributor       | KMemoryRegionAttr_ShouldKernelMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x50042000, 0x1000,  KMemoryRegionType_InterruptCpuInterface      | KMemoryRegionAttr_ShouldKernelMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x50043000, 0x1D000, KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x6000F000, 0x1000,  KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(0x6001DC00, 0x400,   KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x70006000, 0x40,    KMemoryRegionType_Uart                       | KMemoryRegionAttr_ShouldKernelMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x70019000, 0x1000,  KMemoryRegionType_MemoryController           | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7001C000, 0x1000,  KMemoryRegionType_MemoryController0          | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7001D000, 0x1000,  KMemoryRegionType_MemoryController1          | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7000E000, 0x400,   KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7000E400, 0xC00,   KMemoryRegionType_PowerManagementController  | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x50040000, 0x1000,  KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x50041000, 0x1000,  KMemoryRegionType_InterruptDistributor       | KMemoryRegionAttr_ShouldKernelMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x50042000, 0x1000,  KMemoryRegionType_InterruptCpuInterface      | KMemoryRegionAttr_ShouldKernelMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x50043000, 0x1D000, KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x6000F000, 0x1000,  KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x6001DC00, 0x400,   KMemoryRegionType_None                       | KMemoryRegionAttr_NoUserMap));
        }
 
-        void SetupDramPhysicalMemoryBlocks() {
+        void SetupDramPhysicalMemoryRegions() {
            const size_t intended_memory_size                   = KSystemControl::Init::GetIntendedMemorySize();
            const KPhysicalAddress physical_memory_base_address = KSystemControl::Init::GetKernelPhysicalBaseAddress(DramPhysicalAddress);
 
            /* Insert blocks into the tree. */
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(physical_memory_base_address), intended_memory_size,  KMemoryRegionType_Dram));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(physical_memory_base_address), ReservedEarlyDramSize, KMemoryRegionType_DramReservedEarly));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(physical_memory_base_address), intended_memory_size,  KMemoryRegionType_Dram));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(physical_memory_base_address), ReservedEarlyDramSize, KMemoryRegionType_DramReservedEarly));
        }
 
    }
diff --git a/libraries/libmesosphere/source/init/kern_init_slab_setup.cpp b/libraries/libmesosphere/source/init/kern_init_slab_setup.cpp
index a4dcb37f3..e6680971f 100644
--- a/libraries/libmesosphere/source/init/kern_init_slab_setup.cpp
+++ b/libraries/libmesosphere/source/init/kern_init_slab_setup.cpp
@@ -98,7 +98,7 @@ namespace ams::kern::init {
        KVirtualAddress start = util::AlignUp(GetInteger(address), alignof(T));
 
        if (size > 0) {
-            MESOSPHERE_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().FindContainingBlock(GetInteger(start) + size - 1)->IsDerivedFrom(KMemoryRegionType_KernelSlab));
+            MESOSPHERE_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(GetInteger(start) + size - 1)->IsDerivedFrom(KMemoryRegionType_KernelSlab));
            T::InitializeSlabHeap(GetVoidPointer(start), size);
        }
 
diff --git a/libraries/libmesosphere/source/kern_k_memory_layout.cpp b/libraries/libmesosphere/source/kern_k_memory_layout.cpp
index 3f185e6fc..bf3578489 100644
--- a/libraries/libmesosphere/source/kern_k_memory_layout.cpp
+++ b/libraries/libmesosphere/source/kern_k_memory_layout.cpp
@@ -17,19 +17,19 @@
 
 namespace ams::kern {
 
-    bool KMemoryBlockTree::Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) {
-        /* Locate the memory block that contains the address. */
-        auto it = this->FindContainingBlock(address);
+    bool KMemoryRegionTree::Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) {
+        /* Locate the memory region that contains the address. */
+        auto it = this->FindContainingRegion(address);
 
        /* We require that the old attr is correct. */
        if (it->GetAttributes() != old_attr) {
            return false;
        }
 
-        /* We further require that the block can be split from the old block. */
-        const uintptr_t inserted_block_end  = address + size;
-        const uintptr_t inserted_block_last = inserted_block_end - 1;
-        if (it->GetLastAddress() < inserted_block_last) {
+        /* We further require that the region can be split from the old region. */
+        const uintptr_t inserted_region_end  = address + size;
+        const uintptr_t inserted_region_last = inserted_region_end - 1;
+        if (it->GetLastAddress() < inserted_region_last) {
            return false;
        }
 
@@ -38,8 +38,8 @@ namespace ams::kern {
            return false;
        }
 
-        /* Cache information from the block before we remove it. */
-        KMemoryBlock *cur_block = std::addressof(*it);
+        /* Cache information from the region before we remove it. */
+        KMemoryRegion *cur_region = std::addressof(*it);
        const uintptr_t old_address = it->GetAddress();
        const size_t old_size       = it->GetSize();
        const uintptr_t old_end     = old_address + old_size;
@@ -47,39 +47,39 @@ namespace ams::kern {
        const uintptr_t old_pair    = it->GetPairAddress();
        const u32 old_type          = it->GetType();
 
-        /* Erase the existing block from the tree. */
+        /* Erase the existing region from the tree. */
        this->erase(it);
 
-        /* If we need to insert a block before the region, do so. */
+        /* If we need to insert a region before the region, do so. */
        if (old_address != address) {
-            new (cur_block) KMemoryBlock(old_address, address - old_address, old_pair, old_attr, old_type);
-            this->insert(*cur_block);
-            cur_block = KMemoryLayout::GetMemoryBlockAllocator().Allocate();
+            new (cur_region) KMemoryRegion(old_address, address - old_address, old_pair, old_attr, old_type);
+            this->insert(*cur_region);
+            cur_region = KMemoryLayout::GetMemoryRegionAllocator().Allocate();
        }
 
-        /* Insert a new block. */
+        /* Insert a new region. */
        const uintptr_t new_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (address - old_address) : old_pair;
-        new (cur_block) KMemoryBlock(address, size, new_pair, new_attr, type_id);
-        this->insert(*cur_block);
+        new (cur_region) KMemoryRegion(address, size, new_pair, new_attr, type_id);
+        this->insert(*cur_region);
 
-        /* If we need to insert a block after the region, do so. */
-        if (old_last != inserted_block_last) {
-            const uintptr_t after_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (inserted_block_end - old_address) : old_pair;
-            this->insert(*KMemoryLayout::GetMemoryBlockAllocator().Create(inserted_block_end, old_end - inserted_block_end, after_pair, old_attr, old_type));
+        /* If we need to insert a region after the region, do so. */
+        if (old_last != inserted_region_last) {
+            const uintptr_t after_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (inserted_region_end - old_address) : old_pair;
+            this->insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(inserted_region_end, old_end - inserted_region_end, after_pair, old_attr, old_type));
        }
 
        return true;
    }
 
-    KVirtualAddress KMemoryBlockTree::GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id) {
+    KVirtualAddress KMemoryRegionTree::GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id) {
        /* We want to find the total extents of the type id. */
        const auto extents = this->GetDerivedRegionExtents(type_id);
 
        /* Ensure that our alignment is correct. */
-        MESOSPHERE_INIT_ABORT_UNLESS(util::IsAligned(extents.first_block->GetAddress(), alignment));
+        MESOSPHERE_INIT_ABORT_UNLESS(util::IsAligned(extents.first_region->GetAddress(), alignment));
 
-        const uintptr_t first_address = extents.first_block->GetAddress();
-        const uintptr_t last_address  = extents.last_block->GetLastAddress();
+        const uintptr_t first_address = extents.first_region->GetAddress();
+        const uintptr_t last_address  = extents.last_region->GetLastAddress();
 
        while (true) {
            const uintptr_t candidate = util::AlignDown(KSystemControl::Init::GenerateRandomRange(first_address, last_address), alignment);
@@ -96,38 +96,38 @@ namespace ams::kern {
                continue;
            }
 
-            /* Locate the candidate block, and ensure it fits. */
-            const KMemoryBlock *candidate_block = std::addressof(*this->FindContainingBlock(candidate));
-            if (candidate_last > candidate_block->GetLastAddress()) {
+            /* Locate the candidate region, and ensure it fits. */
+            const KMemoryRegion *candidate_region = std::addressof(*this->FindContainingRegion(candidate));
+            if (candidate_last > candidate_region->GetLastAddress()) {
                continue;
            }
 
-            /* Ensure that the block has the correct type id. */
-            if (candidate_block->GetType() != type_id)
+            /* Ensure that the region has the correct type id. */
+            if (candidate_region->GetType() != type_id)
                continue;
 
            return candidate;
        }
    }
 
-    void KMemoryLayout::InitializeLinearMemoryBlockTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start) {
+    void KMemoryLayout::InitializeLinearMemoryRegionTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start) {
        /* Set static differences. */
        s_linear_phys_to_virt_diff = GetInteger(linear_virtual_start) - GetInteger(aligned_linear_phys_start);
        s_linear_virt_to_phys_diff = GetInteger(aligned_linear_phys_start) - GetInteger(linear_virtual_start);
 
        /* Initialize linear trees. */
-        for (auto &block : GetPhysicalMemoryBlockTree()) {
-            if (!block.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
+        for (auto &region : GetPhysicalMemoryRegionTree()) {
+            if (!region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
                continue;
            }
 
-            GetPhysicalLinearMemoryBlockTree().insert(*GetMemoryBlockAllocator().Create(block.GetAddress(), block.GetSize(), block.GetAttributes(), block.GetType()));
+            GetPhysicalLinearMemoryRegionTree().insert(*GetMemoryRegionAllocator().Create(region.GetAddress(), region.GetSize(), region.GetAttributes(), region.GetType()));
        }
 
-        for (auto &block : GetVirtualMemoryBlockTree()) {
-            if (!block.IsDerivedFrom(KMemoryRegionType_Dram)) {
+        for (auto &region : GetVirtualMemoryRegionTree()) {
+            if (!region.IsDerivedFrom(KMemoryRegionType_Dram)) {
                continue;
            }
 
-            GetVirtualLinearMemoryBlockTree().insert(*GetMemoryBlockAllocator().Create(block.GetAddress(), block.GetSize(), block.GetAttributes(), block.GetType()));
+            GetVirtualLinearMemoryRegionTree().insert(*GetMemoryRegionAllocator().Create(region.GetAddress(), region.GetSize(), region.GetAttributes(), region.GetType()));
        }
    }
 
@@ -149,17 +149,17 @@ namespace ams::kern {
 
        KVirtualAddress GetCoreLocalRegionVirtualAddress() {
            while (true) {
-                const uintptr_t candidate_start = GetInteger(KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(CoreLocalRegionSizeWithGuards, CoreLocalRegionAlign, KMemoryRegionType_None));
+                const uintptr_t candidate_start = GetInteger(KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(CoreLocalRegionSizeWithGuards, CoreLocalRegionAlign, KMemoryRegionType_None));
                const uintptr_t candidate_end   = candidate_start + CoreLocalRegionSizeWithGuards;
                const uintptr_t candidate_last  = candidate_end - 1;
 
-                const KMemoryBlock *containing_block = std::addressof(*KMemoryLayout::GetVirtualMemoryBlockTree().FindContainingBlock(candidate_start));
+                const KMemoryRegion *containing_region = std::addressof(*KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(candidate_start));
 
-                if (candidate_last > containing_block->GetLastAddress()) {
+                if (candidate_last > containing_region->GetLastAddress()) {
                    continue;
                }
 
-                if (containing_block->GetType() != KMemoryRegionType_None) {
+                if (containing_region->GetType() != KMemoryRegionType_None) {
                    continue;
                }
 
@@ -167,11 +167,11 @@ namespace ams::kern {
                    continue;
                }
 
-                if (containing_block->GetAddress() > util::AlignDown(candidate_start, CoreLocalRegionBoundsAlign)) {
+                if (containing_region->GetAddress() > util::AlignDown(candidate_start, CoreLocalRegionBoundsAlign)) {
                    continue;
                }
 
-                if (util::AlignUp(candidate_last, CoreLocalRegionBoundsAlign) - 1 > containing_block->GetLastAddress()) {
+                if (util::AlignUp(candidate_last, CoreLocalRegionBoundsAlign) - 1 > containing_region->GetLastAddress()) {
                    continue;
                }
 
@@ -180,17 +180,17 @@ namespace ams::kern {
            }
        }
 
-        void InsertPoolPartitionBlockIntoBothTrees(size_t start, size_t size, KMemoryRegionType phys_type, KMemoryRegionType virt_type, u32 &cur_attr) {
+        void InsertPoolPartitionRegionIntoBothTrees(size_t start, size_t size, KMemoryRegionType phys_type, KMemoryRegionType virt_type, u32 &cur_attr) {
            const u32 attr = cur_attr++;
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(start, size, phys_type, attr));
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstBlockByTypeAttr(phys_type, attr)->GetPairAddress(), size, virt_type, attr));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(start, size, phys_type, attr));
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(phys_type, attr)->GetPairAddress(), size, virt_type, attr));
        }
 
    }
 
-    void SetupCoreLocalRegionMemoryBlocks(KInitialPageTable &page_table, KInitialPageAllocator &page_allocator) {
+    void SetupCoreLocalRegionMemoryRegions(KInitialPageTable &page_table, KInitialPageAllocator &page_allocator) {
        const KVirtualAddress core_local_virt_start = GetCoreLocalRegionVirtualAddress();
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(core_local_virt_start), CoreLocalRegionSize, KMemoryRegionType_CoreLocal));
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(core_local_virt_start), CoreLocalRegionSize, KMemoryRegionType_CoreLocal));
 
        /* Allocate a page for each core. */
        KPhysicalAddress core_local_region_start_phys[cpu::NumCores] = {};
@@ -222,9 +222,9 @@ namespace ams::kern {
        StoreInitArguments();
    }
 
-    void SetupPoolPartitionMemoryBlocks() {
+    void SetupPoolPartitionMemoryRegions() {
        /* Start by identifying the extents of the DRAM memory region. */
-        const auto dram_extents = KMemoryLayout::GetPhysicalMemoryBlockTree().GetDerivedRegionExtents(KMemoryRegionType_Dram);
+        const auto dram_extents = KMemoryLayout::GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Dram);
 
        /* Get Application and Applet pool sizes. */
        const size_t application_pool_size = KSystemControl::Init::GetApplicationPoolSize();
@@ -232,40 +232,40 @@ namespace ams::kern {
        const size_t unsafe_system_pool_min_size = KSystemControl::Init::GetMinimumNonSecureSystemPoolSize();
 
        /* Find the start of the kernel DRAM region. */
-        const uintptr_t kernel_dram_start = KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstDerivedBlock(KMemoryRegionType_DramKernel)->GetAddress();
+        const uintptr_t kernel_dram_start = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstDerivedRegion(KMemoryRegionType_DramKernel)->GetAddress();
        MESOSPHERE_INIT_ABORT_UNLESS(util::IsAligned(kernel_dram_start, CarveoutAlignment));
 
        /* Find the start of the pool partitions region. */
-        const uintptr_t pool_partitions_start = KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_DramPoolPartition)->GetAddress();
+        const uintptr_t pool_partitions_start = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_DramPoolPartition)->GetAddress();
 
        /* Decide on starting addresses for our pools. */
-        const uintptr_t application_pool_start   = dram_extents.last_block->GetEndAddress() - application_pool_size;
+        const uintptr_t application_pool_start   = dram_extents.last_region->GetEndAddress() - application_pool_size;
        const uintptr_t applet_pool_start        = application_pool_start - applet_pool_size;
        const uintptr_t unsafe_system_pool_start = std::min(kernel_dram_start + CarveoutSizeMax, util::AlignDown(applet_pool_start - unsafe_system_pool_min_size, CarveoutAlignment));
        const size_t    unsafe_system_pool_size  = applet_pool_start - unsafe_system_pool_start;
 
        /* We want to arrange application pool depending on where the middle of dram is. */
-        const uintptr_t dram_midpoint = (dram_extents.first_block->GetAddress() + dram_extents.last_block->GetEndAddress()) / 2;
+        const uintptr_t dram_midpoint = (dram_extents.first_region->GetAddress() + dram_extents.last_region->GetEndAddress()) / 2;
        u32 cur_pool_attr = 0;
        size_t total_overhead_size = 0;
-        if (dram_extents.last_block->GetEndAddress() <= dram_midpoint || dram_midpoint <= application_pool_start) {
-            InsertPoolPartitionBlockIntoBothTrees(application_pool_start, application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
+        if (dram_extents.last_region->GetEndAddress() <= dram_midpoint || dram_midpoint <= application_pool_start) {
+            InsertPoolPartitionRegionIntoBothTrees(application_pool_start, application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
            total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(application_pool_size);
        } else {
            const size_t first_application_pool_size  = dram_midpoint - application_pool_start;
            const size_t second_application_pool_size = application_pool_start + application_pool_size - dram_midpoint;
-            InsertPoolPartitionBlockIntoBothTrees(application_pool_start, first_application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
-            InsertPoolPartitionBlockIntoBothTrees(dram_midpoint, second_application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
+            InsertPoolPartitionRegionIntoBothTrees(application_pool_start, first_application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
+            InsertPoolPartitionRegionIntoBothTrees(dram_midpoint, second_application_pool_size, KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool, cur_pool_attr);
            total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(first_application_pool_size);
            total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(second_application_pool_size);
        }
 
        /* Insert the applet pool. */
-        InsertPoolPartitionBlockIntoBothTrees(applet_pool_start, applet_pool_size, KMemoryRegionType_DramAppletPool, KMemoryRegionType_VirtualDramAppletPool, cur_pool_attr);
+        InsertPoolPartitionRegionIntoBothTrees(applet_pool_start, applet_pool_size, KMemoryRegionType_DramAppletPool, KMemoryRegionType_VirtualDramAppletPool, cur_pool_attr);
        total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(applet_pool_size);
 
        /* Insert the nonsecure system pool. */
-        InsertPoolPartitionBlockIntoBothTrees(unsafe_system_pool_start, unsafe_system_pool_size, KMemoryRegionType_DramSystemNonSecurePool, KMemoryRegionType_VirtualDramSystemNonSecurePool, cur_pool_attr);
+        InsertPoolPartitionRegionIntoBothTrees(unsafe_system_pool_start, unsafe_system_pool_size, KMemoryRegionType_DramSystemNonSecurePool, KMemoryRegionType_VirtualDramSystemNonSecurePool, cur_pool_attr);
        total_overhead_size += KMemoryManager::CalculateMetadataOverheadSize(unsafe_system_pool_size);
 
        /* Insert the metadata pool. */
@@ -273,11 +273,11 @@ namespace ams::kern {
        const uintptr_t metadata_pool_start = unsafe_system_pool_start - total_overhead_size;
        const size_t    metadata_pool_size  = total_overhead_size;
        u32 metadata_pool_attr = 0;
-        InsertPoolPartitionBlockIntoBothTrees(metadata_pool_start, metadata_pool_size, KMemoryRegionType_DramMetadataPool, KMemoryRegionType_VirtualDramMetadataPool, metadata_pool_attr);
+        InsertPoolPartitionRegionIntoBothTrees(metadata_pool_start, metadata_pool_size, KMemoryRegionType_DramMetadataPool, KMemoryRegionType_VirtualDramMetadataPool, metadata_pool_attr);
 
        /* Insert the system pool. */
        const uintptr_t system_pool_size = metadata_pool_start - pool_partitions_start;
-        InsertPoolPartitionBlockIntoBothTrees(pool_partitions_start, system_pool_size, KMemoryRegionType_DramSystemPool, KMemoryRegionType_VirtualDramSystemPool, cur_pool_attr);
+        InsertPoolPartitionRegionIntoBothTrees(pool_partitions_start, system_pool_size, KMemoryRegionType_DramSystemPool, KMemoryRegionType_VirtualDramSystemPool, cur_pool_attr);
    }
 
 }
diff --git a/mesosphere/kernel/source/arch/arm64/init/kern_init_core.cpp b/mesosphere/kernel/source/arch/arm64/init/kern_init_core.cpp
index 9f011c316..7ce13538c 100644
--- a/mesosphere/kernel/source/arch/arm64/init/kern_init_core.cpp
+++ b/mesosphere/kernel/source/arch/arm64/init/kern_init_core.cpp
@@ -49,9 +49,9 @@ namespace ams::kern::init {
    void MapStackForCore(KInitialPageTable &page_table, KMemoryRegionType type, u32 core_id) {
        constexpr size_t StackSize  = PageSize;
        constexpr size_t StackAlign = PageSize;
-        const KVirtualAddress  stack_start_virt = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegionWithGuard(StackSize, StackAlign, KMemoryRegionType_KernelMisc, PageSize);
+        const KVirtualAddress  stack_start_virt = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(StackSize, StackAlign, KMemoryRegionType_KernelMisc, PageSize);
        const KPhysicalAddress stack_start_phys = g_initial_page_allocator.Allocate();
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(stack_start_virt), StackSize, type, core_id));
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(stack_start_virt), StackSize, type, core_id));
 
        page_table.Map(stack_start_virt, StackSize, stack_start_phys, KernelRwDataAttribute, g_initial_page_allocator);
    }
@@ -98,11 +98,11 @@ namespace ams::kern::init {
        /* Initialize the slab allocator counts. */
        InitializeSlabResourceCounts();
 
-        /* Insert the root block for the virtual memory tree, from which all other blocks will derive. */
-        KMemoryLayout::GetVirtualMemoryBlockTree().insert(*KMemoryLayout::GetMemoryBlockAllocator().Create(KernelVirtualAddressSpaceBase, KernelVirtualAddressSpaceSize, 0, 0));
+        /* Insert the root region for the virtual memory tree, from which all other regions will derive. */
+        KMemoryLayout::GetVirtualMemoryRegionTree().insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(KernelVirtualAddressSpaceBase, KernelVirtualAddressSpaceSize, 0, 0));
 
-        /* Insert the root block for the physical memory tree, from which all other blocks will derive. */
-        KMemoryLayout::GetPhysicalMemoryBlockTree().insert(*KMemoryLayout::GetMemoryBlockAllocator().Create(KernelPhysicalAddressSpaceBase, KernelPhysicalAddressSpaceSize, 0, 0));
+        /* Insert the root region for the physical memory tree, from which all other regions will derive. */
+        KMemoryLayout::GetPhysicalMemoryRegionTree().insert(*KMemoryLayout::GetMemoryRegionAllocator().Create(KernelPhysicalAddressSpaceBase, KernelPhysicalAddressSpaceSize, 0, 0));
 
        /* Save start and end for ease of use. */
        const uintptr_t code_start_virt_addr = reinterpret_cast<uintptr_t>(_start);
@@ -116,26 +116,26 @@ namespace ams::kern::init {
        if (!(kernel_region_start + KernelRegionSize - 1 <= KernelVirtualAddressSpaceLast)) {
            kernel_region_size = KernelVirtualAddressSpaceEnd - GetInteger(kernel_region_start);
        }
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(kernel_region_start), kernel_region_size, KMemoryRegionType_Kernel));
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(kernel_region_start), kernel_region_size, KMemoryRegionType_Kernel));
 
        /* Setup the code region. */
        constexpr size_t CodeRegionAlign = PageSize;
        const KVirtualAddress code_region_start = util::AlignDown(code_start_virt_addr, CodeRegionAlign);
        const KVirtualAddress code_region_end   = util::AlignUp(code_end_virt_addr, CodeRegionAlign);
        const size_t code_region_size = GetInteger(code_region_end) - GetInteger(code_region_start);
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(code_region_start), code_region_size, KMemoryRegionType_KernelCode));
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(code_region_start), code_region_size, KMemoryRegionType_KernelCode));
 
        /* Setup the misc region. */
        constexpr size_t MiscRegionSize  = 32_MB;
        constexpr size_t MiscRegionAlign = KernelAslrAlignment;
-        const KVirtualAddress misc_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(MiscRegionSize, MiscRegionAlign, KMemoryRegionType_Kernel);
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(misc_region_start), MiscRegionSize, KMemoryRegionType_KernelMisc));
+        const KVirtualAddress misc_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(MiscRegionSize, MiscRegionAlign, KMemoryRegionType_Kernel);
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(misc_region_start), MiscRegionSize, KMemoryRegionType_KernelMisc));
 
        /* Setup the stack region. */
        constexpr size_t StackRegionSize  = 14_MB;
        constexpr size_t StackRegionAlign = KernelAslrAlignment;
-        const KVirtualAddress stack_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(StackRegionSize, StackRegionAlign, KMemoryRegionType_Kernel);
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(stack_region_start), StackRegionSize, KMemoryRegionType_KernelStack));
+        const KVirtualAddress stack_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(StackRegionSize, StackRegionAlign, KMemoryRegionType_Kernel);
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(stack_region_start), StackRegionSize, KMemoryRegionType_KernelStack));
 
        /* Decide if Kernel should have enlarged resource region (slab region + page table heap region). */
        const bool use_extra_resources = KSystemControl::Init::ShouldIncreaseThreadResourceLimit();
@@ -152,66 +152,66 @@ namespace ams::kern::init {
        const KPhysicalAddress slab_end_phys_addr = slab_start_phys_addr + slab_region_size;
        constexpr size_t SlabRegionAlign = KernelAslrAlignment;
        const size_t slab_region_needed_size = util::AlignUp(GetInteger(code_end_phys_addr) + slab_region_size, SlabRegionAlign) - util::AlignDown(GetInteger(code_end_phys_addr), SlabRegionAlign);
-        const KVirtualAddress slab_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(slab_region_needed_size, SlabRegionAlign, KMemoryRegionType_Kernel) + (GetInteger(code_end_phys_addr) % SlabRegionAlign);
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(slab_region_start), slab_region_size, KMemoryRegionType_KernelSlab));
+        const KVirtualAddress slab_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(slab_region_needed_size, SlabRegionAlign, KMemoryRegionType_Kernel) + (GetInteger(code_end_phys_addr) % SlabRegionAlign);
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(slab_region_start), slab_region_size, KMemoryRegionType_KernelSlab));
 
-        /* Set the slab region's pair block. */
-        KMemoryLayout::GetVirtualMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_KernelSlab)->SetPairAddress(GetInteger(slab_start_phys_addr));
+        /* Set the slab region's pair region. */
+        KMemoryLayout::GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelSlab)->SetPairAddress(GetInteger(slab_start_phys_addr));
 
        /* Setup the temp region. */
        constexpr size_t TempRegionSize  = 128_MB;
        constexpr size_t TempRegionAlign = KernelAslrAlignment;
-        const KVirtualAddress temp_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegion(TempRegionSize, TempRegionAlign, KMemoryRegionType_Kernel);
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(temp_region_start), TempRegionSize, KMemoryRegionType_KernelTemp));
+        const KVirtualAddress temp_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegion(TempRegionSize, TempRegionAlign, KMemoryRegionType_Kernel);
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(temp_region_start), TempRegionSize, KMemoryRegionType_KernelTemp));
 
        /* Setup the Misc Unknown Debug region, if it's not zero. */
        if (misc_unk_debug_phys_addr) {
            constexpr size_t MiscUnknownDebugRegionSize  = PageSize;
            constexpr size_t MiscUnknownDebugRegionAlign = PageSize;
-            const KVirtualAddress misc_unk_debug_virt_addr = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegionWithGuard(MiscUnknownDebugRegionSize, MiscUnknownDebugRegionAlign, KMemoryRegionType_KernelMisc, PageSize);
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(misc_unk_debug_virt_addr), MiscUnknownDebugRegionSize, KMemoryRegionType_KernelMiscUnknownDebug));
+            const KVirtualAddress misc_unk_debug_virt_addr = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(MiscUnknownDebugRegionSize, MiscUnknownDebugRegionAlign, KMemoryRegionType_KernelMisc, PageSize);
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(misc_unk_debug_virt_addr), MiscUnknownDebugRegionSize, KMemoryRegionType_KernelMiscUnknownDebug));
            ttbr1_table.Map(misc_unk_debug_virt_addr, MiscUnknownDebugRegionSize, misc_unk_debug_phys_addr, KernelRoDataAttribute, g_initial_page_allocator);
        }
 
-        /* Setup board-specific device physical blocks. */
-        SetupDevicePhysicalMemoryBlocks();
+        /* Setup board-specific device physical regions. */
+        SetupDevicePhysicalMemoryRegions();
 
        /* Automatically map in devices that have auto-map attributes. */
-        for (auto &block : KMemoryLayout::GetPhysicalMemoryBlockTree()) {
-            /* We only care about automatically-mapped blocks. */
-            if (!block.IsDerivedFrom(KMemoryRegionType_KernelAutoMap)) {
+        for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
+            /* We only care about automatically-mapped regions. */
+            if (!region.IsDerivedFrom(KMemoryRegionType_KernelAutoMap)) {
                continue;
            }
 
-            /* If this block has already been mapped, no need to consider it. */
-            if (block.HasTypeAttribute(KMemoryRegionAttr_DidKernelMap)) {
+            /* If this region has already been mapped, no need to consider it. */
+            if (region.HasTypeAttribute(KMemoryRegionAttr_DidKernelMap)) {
                continue;
            }
 
-            /* Set the attribute to note we've mapped this block. */
-            block.SetTypeAttribute(KMemoryRegionAttr_DidKernelMap);
+            /* Set the attribute to note we've mapped this region. */
+            region.SetTypeAttribute(KMemoryRegionAttr_DidKernelMap);
 
-            /* Create a virtual pair block and insert it into the tree. */
-            const KPhysicalAddress map_phys_addr = util::AlignDown(block.GetAddress(), PageSize);
-            const size_t map_size = util::AlignUp(block.GetEndAddress(), PageSize) - GetInteger(map_phys_addr);
-            const KVirtualAddress map_virt_addr = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegionWithGuard(map_size, PageSize, KMemoryRegionType_KernelMisc, PageSize);
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(map_virt_addr), map_size, KMemoryRegionType_KernelMiscMappedDevice));
-            block.SetPairAddress(GetInteger(map_virt_addr) + block.GetAddress() - GetInteger(map_phys_addr));
+            /* Create a virtual pair region and insert it into the tree. */
+            const KPhysicalAddress map_phys_addr = util::AlignDown(region.GetAddress(), PageSize);
+            const size_t map_size = util::AlignUp(region.GetEndAddress(), PageSize) - GetInteger(map_phys_addr);
+            const KVirtualAddress map_virt_addr = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(map_size, PageSize, KMemoryRegionType_KernelMisc, PageSize);
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(map_virt_addr), map_size, KMemoryRegionType_KernelMiscMappedDevice));
+            region.SetPairAddress(GetInteger(map_virt_addr) + region.GetAddress() - GetInteger(map_phys_addr));
 
            /* Map the page in to our page table. */
            ttbr1_table.Map(map_virt_addr, map_size, map_phys_addr, KernelMmioAttribute, g_initial_page_allocator);
        }
 
-        /* Setup the basic DRAM blocks. */
-        SetupDramPhysicalMemoryBlocks();
+        /* Setup the basic DRAM regions. */
+        SetupDramPhysicalMemoryRegions();
 
-        /* Insert a physical block for the kernel code region. */
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(code_start_phys_addr), (code_end_virt_addr - code_start_virt_addr), KMemoryRegionType_DramKernelCode));
-        KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_DramKernelCode)->SetPairAddress(code_start_virt_addr);
+        /* Insert a physical region for the kernel code region. */
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(code_start_phys_addr), (code_end_virt_addr - code_start_virt_addr), KMemoryRegionType_DramKernelCode));
+        KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_DramKernelCode)->SetPairAddress(code_start_virt_addr);
 
-        /* Insert a physical block for the kernel slab region. */
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(slab_start_phys_addr), slab_region_size, KMemoryRegionType_DramKernelSlab));
-        KMemoryLayout::GetPhysicalMemoryBlockTree().FindFirstBlockByTypeAttr(KMemoryRegionType_DramKernelSlab)->SetPairAddress(GetInteger(slab_region_start));
+        /* Insert a physical region for the kernel slab region. */
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(slab_start_phys_addr), slab_region_size, KMemoryRegionType_DramKernelSlab));
+        KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_DramKernelSlab)->SetPairAddress(GetInteger(slab_region_start));
 
        /* Map and clear the slab region. */
        ttbr1_table.Map(slab_region_start, slab_region_size, slab_start_phys_addr, KernelRwDataAttribute, g_initial_page_allocator);
@@ -222,69 +222,69 @@ namespace ams::kern::init {
        const size_t page_table_heap_size = GetInteger(resource_end_phys_addr) - GetInteger(slab_end_phys_addr);
        MESOSPHERE_INIT_ABORT_UNLESS(page_table_heap_size / 4_MB > 2);
 
-        /* Insert a physical block for the kernel page table heap region */
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(slab_end_phys_addr), page_table_heap_size, KMemoryRegionType_DramKernelPtHeap));
+        /* Insert a physical region for the kernel page table heap region */
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(slab_end_phys_addr), page_table_heap_size, KMemoryRegionType_DramKernelPtHeap));
 
-        /* All DRAM blocks that we haven't tagged by this point will be mapped under the linear mapping. Tag them. */
-        for (auto &block : KMemoryLayout::GetPhysicalMemoryBlockTree()) {
-            if (block.GetType() == KMemoryRegionType_Dram) {
-                block.SetTypeAttribute(KMemoryRegionAttr_LinearMapped);
+        /* All DRAM regions that we haven't tagged by this point will be mapped under the linear mapping. Tag them. */
+        for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
+            if (region.GetType() == KMemoryRegionType_Dram) {
+                region.SetTypeAttribute(KMemoryRegionAttr_LinearMapped);
            }
        }
 
        /* Setup the linear mapping region. */
        constexpr size_t LinearRegionAlign = 1_GB;
-        const auto linear_extents = KMemoryLayout::GetPhysicalMemoryBlockTree().GetDerivedRegionExtents(KMemoryRegionAttr_LinearMapped);
-        const KPhysicalAddress aligned_linear_phys_start = util::AlignDown(linear_extents.first_block->GetAddress(), LinearRegionAlign);
-        const size_t linear_region_size = util::AlignUp(linear_extents.last_block->GetEndAddress(), LinearRegionAlign) - GetInteger(aligned_linear_phys_start);
-        const KVirtualAddress linear_region_start = KMemoryLayout::GetVirtualMemoryBlockTree().GetRandomAlignedRegionWithGuard(linear_region_size, LinearRegionAlign, KMemoryRegionType_None, LinearRegionAlign);
+        const auto linear_extents = KMemoryLayout::GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_LinearMapped);
+        const KPhysicalAddress aligned_linear_phys_start = util::AlignDown(linear_extents.first_region->GetAddress(), LinearRegionAlign);
+        const size_t linear_region_size = util::AlignUp(linear_extents.last_region->GetEndAddress(), LinearRegionAlign) - GetInteger(aligned_linear_phys_start);
+        const KVirtualAddress linear_region_start = KMemoryLayout::GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(linear_region_size, LinearRegionAlign, KMemoryRegionType_None, LinearRegionAlign);
        const uintptr_t linear_region_phys_to_virt_diff = GetInteger(linear_region_start) - GetInteger(aligned_linear_phys_start);
 
-        /* Map and create blocks for all the linearly-mapped data. */
-        for (auto &block : KMemoryLayout::GetPhysicalMemoryBlockTree()) {
-            if (!block.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
+        /* Map and create regions for all the linearly-mapped data. */
+        for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
+            if (!region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
                continue;
            }
 
-            const uintptr_t block_virt_addr = block.GetAddress() + linear_region_phys_to_virt_diff;
-            ttbr1_table.Map(block_virt_addr, block.GetSize(), block.GetAddress(), KernelRwDataAttribute, g_initial_page_allocator);
+            const uintptr_t region_virt_addr = region.GetAddress() + linear_region_phys_to_virt_diff;
+            ttbr1_table.Map(region_virt_addr, region.GetSize(), region.GetAddress(), KernelRwDataAttribute, g_initial_page_allocator);
 
-            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(block_virt_addr, block.GetSize(), GetTypeForVirtualLinearMapping(block.GetType())));
-            block.SetPairAddress(block_virt_addr);
-            KMemoryLayout::GetVirtualMemoryBlockTree().FindContainingBlock(block_virt_addr)->SetPairAddress(block.GetAddress());
+            MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(region_virt_addr, region.GetSize(), GetTypeForVirtualLinearMapping(region.GetType())));
+            region.SetPairAddress(region_virt_addr);
+            KMemoryLayout::GetVirtualMemoryRegionTree().FindContainingRegion(region_virt_addr)->SetPairAddress(region.GetAddress());
        }
 
-        /* Create blocks for and map all core-specific stacks. */
+        /* Create regions for and map all core-specific stacks. */
        for (size_t i = 0; i < cpu::NumCores; i++) {
            MapStackForCore(ttbr1_table, KMemoryRegionType_KernelMiscMainStack, i);
            MapStackForCore(ttbr1_table, KMemoryRegionType_KernelMiscIdleStack, i);
            MapStackForCore(ttbr1_table, KMemoryRegionType_KernelMiscExceptionStack, i);
        }
 
-        /* Setup the KCoreLocalRegion blocks. */
-        SetupCoreLocalRegionMemoryBlocks(ttbr1_table, g_initial_page_allocator);
+        /* Setup the KCoreLocalRegion regions. */
+        SetupCoreLocalRegionMemoryRegions(ttbr1_table, g_initial_page_allocator);
 
        /* Finalize the page allocator, we're done allocating at this point. */
        const KPhysicalAddress final_init_page_table_end_address = g_initial_page_allocator.GetFinalNextAddress();
        const size_t init_page_table_region_size = GetInteger(final_init_page_table_end_address) - GetInteger(resource_end_phys_addr);
 
-        /* Insert blocks for the initial page table region. */
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryBlockTree().Insert(GetInteger(resource_end_phys_addr), init_page_table_region_size, KMemoryRegionType_DramKernelInitPt));
-        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryBlockTree().Insert(GetInteger(resource_end_phys_addr) + linear_region_phys_to_virt_diff, init_page_table_region_size, KMemoryRegionType_VirtualKernelInitPt));
+        /* Insert regions for the initial page table region. */
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(resource_end_phys_addr), init_page_table_region_size, KMemoryRegionType_DramKernelInitPt));
+        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(resource_end_phys_addr) + linear_region_phys_to_virt_diff, init_page_table_region_size, KMemoryRegionType_VirtualKernelInitPt));
 
-        /* All linear-mapped DRAM blocks that we haven't tagged by this point will be allocated to some pool partition. Tag them. */
-        for (auto &block : KMemoryLayout::GetPhysicalMemoryBlockTree()) {
-            if (block.GetType() == KMemoryRegionType_DramLinearMapped) {
-                block.SetType(KMemoryRegionType_DramPoolPartition);
+        /* All linear-mapped DRAM regions that we haven't tagged by this point will be allocated to some pool partition. Tag them. */
+        for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
+            if (region.GetType() == KMemoryRegionType_DramLinearMapped) {
+                region.SetType(KMemoryRegionType_DramPoolPartition);
            }
        }
 
-        /* Setup all other memory blocks needed to arrange the pool partitions. */
-        SetupPoolPartitionMemoryBlocks();
+        /* Setup all other memory regions needed to arrange the pool partitions. */
+        SetupPoolPartitionMemoryRegions();
 
-        /* Cache all linear blocks in their own trees for faster access, later. */
-        KMemoryLayout::InitializeLinearMemoryBlockTrees(aligned_linear_phys_start, linear_region_start);
+        /* Cache all linear regions in their own trees for faster access, later. */
+        KMemoryLayout::InitializeLinearMemoryRegionTrees(aligned_linear_phys_start, linear_region_start);
 
        /* Turn on all other cores. */
        TurnOnAllCores(GetInteger(ttbr1_table.GetPhysicalAddress(reinterpret_cast<uintptr_t>(::ams::kern::init::StartOtherCore))));