/*
 * Copyright (c) Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <mesosphere/kern_common.hpp>
#include <mesosphere/init/kern_init_page_table_select.hpp>
#include <mesosphere/kern_k_memory_region.hpp>

#if defined(ATMOSPHERE_BOARD_NINTENDO_NX)
    #include <mesosphere/board/nintendo/nx/kern_k_memory_layout.hpp>
#elif defined(ATMOSPHERE_BOARD_QEMU_VIRT)
    #include <mesosphere/board/qemu/virt/kern_k_memory_layout.hpp>
#else
    #error "Unknown board for KMemoryLayout"
#endif

namespace ams::kern {

    constexpr size_t KernelAslrAlignment             = 2_MB;
    constexpr size_t KernelVirtualAddressSpaceWidth  = size_t(1ul) << 39ul;
    constexpr size_t KernelPhysicalAddressSpaceWidth = size_t(1ul) << 48ul;

    constexpr size_t KernelVirtualAddressSpaceBase   = 0ul - KernelVirtualAddressSpaceWidth;
    constexpr size_t KernelVirtualAddressSpaceEnd    = KernelVirtualAddressSpaceBase + (KernelVirtualAddressSpaceWidth - KernelAslrAlignment);
    constexpr size_t KernelVirtualAddressSpaceLast   = KernelVirtualAddressSpaceEnd - 1ul;
    constexpr size_t KernelVirtualAddressSpaceSize   = KernelVirtualAddressSpaceEnd - KernelVirtualAddressSpaceBase;

    constexpr size_t KernelPhysicalAddressSpaceBase  = 0ul;
    constexpr size_t KernelPhysicalAddressSpaceEnd   = KernelPhysicalAddressSpaceBase + KernelPhysicalAddressSpaceWidth;
    constexpr size_t KernelPhysicalAddressSpaceLast  = KernelPhysicalAddressSpaceEnd - 1ul;
    constexpr size_t KernelPhysicalAddressSpaceSize  = KernelPhysicalAddressSpaceEnd - KernelPhysicalAddressSpaceBase;
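
    /* Illustrative sanity checks, restating the arithmetic above: the kernel virtual address
     * space is the top 2^39 bytes of the 64-bit address space, and its usable size is that
     * width minus one ASLR alignment unit. */
    static_assert(KernelVirtualAddressSpaceBase == 0xFFFFFF8000000000ul);
    static_assert(KernelVirtualAddressSpaceSize == KernelVirtualAddressSpaceWidth - KernelAslrAlignment);
    static_assert(KernelPhysicalAddressSpaceSize == KernelPhysicalAddressSpaceWidth);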

    constexpr size_t KernelPageTableHeapSize   = init::KInitialPageTable::GetMaximumOverheadSize(kern::MainMemorySizeMax);
    constexpr size_t KernelInitialPageHeapSize = 128_KB;

    constexpr size_t KernelSlabHeapDataSize    = 5_MB;
    constexpr size_t KernelSlabHeapGapsSizeMax = 2_MB - 64_KB;
    constexpr size_t KernelSlabHeapSize        = KernelSlabHeapDataSize + KernelSlabHeapGapsSizeMax;

    /* NOTE: This is calculated from KThread slab counts, assuming KThread size <= 0x860. */
    constexpr size_t KernelSlabHeapAdditionalSize = 0x68000;

    constexpr size_t KernelResourceSize        = KernelPageTableHeapSize + KernelInitialPageHeapSize + KernelSlabHeapSize;
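
    /* KMemoryLayout owns the global virtual/physical memory region trees and the offsets used
     * for linear (direct-mapped) physical<->virtual address translation; all of its state and
     * accessors are static. */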
    class KMemoryLayout {
        private:
            static constinit inline uintptr_t s_linear_phys_to_virt_diff;
            static constinit inline uintptr_t s_linear_virt_to_phys_diff;
            static constinit inline KMemoryRegionTree s_virtual_tree;
            static constinit inline KMemoryRegionTree s_physical_tree;
            static constinit inline KMemoryRegionTree s_virtual_linear_tree;
            static constinit inline KMemoryRegionTree s_physical_linear_tree;
        private:
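            /* The IsTypedAddress helpers take a cached region pointer by reference: when the cached
             * region already contains the queried address it is reused, otherwise the tree is searched
             * and the cache is updated on success. Repeated queries for nearby addresses stay cheap. */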
            template<typename AddressType> requires IsKTypedAddress<AddressType>
            static ALWAYS_INLINE bool IsTypedAddress(const KMemoryRegion *&region, AddressType address, KMemoryRegionTree &tree, KMemoryRegionType type) {
                /* Check if the cached region already contains the address. */
                if (region != nullptr && region->Contains(GetInteger(address))) {
                    return true;
                }

                /* Find the containing region, and update the cache. */
                if (const KMemoryRegion *found = tree.Find(GetInteger(address)); found != nullptr && found->IsDerivedFrom(type)) {
                    region = found;
                    return true;
                } else {
                    return false;
                }
            }
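
            /* The ranged overload below additionally walks forward through adjacent regions, so a
             * [address, address + size) span crossing several contiguous regions of the same derived
             * type is still accepted. */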
            template<typename AddressType> requires IsKTypedAddress<AddressType>
            static ALWAYS_INLINE bool IsTypedAddress(const KMemoryRegion *&region, AddressType address, size_t size, KMemoryRegionTree &tree, KMemoryRegionType type) {
                /* Get the end of the checked region. */
                const uintptr_t last_address = GetInteger(address) + size - 1;

                /* Walk the tree to verify the region is correct. */
                const KMemoryRegion *cur = (region != nullptr && region->Contains(GetInteger(address))) ? region : tree.Find(GetInteger(address));
                while (cur != nullptr && cur->IsDerivedFrom(type)) {
                    if (last_address <= cur->GetLastAddress()) {
                        region = cur;
                        return true;
                    }

                    cur = cur->GetNext();
                }
                return false;
            }

            template<typename AddressType> requires IsKTypedAddress<AddressType>
            static ALWAYS_INLINE const KMemoryRegion *Find(AddressType address, const KMemoryRegionTree &tree) {
                return tree.Find(GetInteger(address));
            }

            static ALWAYS_INLINE KMemoryRegion &Dereference(KMemoryRegion *region) {
                MESOSPHERE_INIT_ABORT_UNLESS(region != nullptr);
                return *region;
            }

            static ALWAYS_INLINE const KMemoryRegion &Dereference(const KMemoryRegion *region) {
                MESOSPHERE_INIT_ABORT_UNLESS(region != nullptr);
                return *region;
            }

            static ALWAYS_INLINE KVirtualAddress GetStackTopAddress(s32 core_id, KMemoryRegionType type) {
                const auto &region = Dereference(GetVirtualMemoryRegionTree().FindByTypeAndAttribute(type, static_cast<u32>(core_id)));
                MESOSPHERE_INIT_ABORT_UNLESS(region.GetEndAddress() != 0);
                return region.GetEndAddress();
            }
        public:
            static ALWAYS_INLINE KMemoryRegionTree &GetVirtualMemoryRegionTree()        { return s_virtual_tree; }
            static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalMemoryRegionTree()       { return s_physical_tree; }
            static ALWAYS_INLINE KMemoryRegionTree &GetVirtualLinearMemoryRegionTree()  { return s_virtual_linear_tree; }
            static ALWAYS_INLINE KMemoryRegionTree &GetPhysicalLinearMemoryRegionTree() { return s_physical_linear_tree; }

            static ALWAYS_INLINE KVirtualAddress  GetLinearVirtualAddress(KPhysicalAddress address) { return GetInteger(address) + s_linear_phys_to_virt_diff; }
            static ALWAYS_INLINE KPhysicalAddress GetLinearPhysicalAddress(KVirtualAddress address) { return GetInteger(address) + s_linear_virt_to_phys_diff; }
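
            /* Example (illustrative sketch): because the linear map is a fixed offset, the two
             * conversions above are exact inverses for any linearly mapped physical address phys:
             *
             *     const KVirtualAddress  virt = KMemoryLayout::GetLinearVirtualAddress(phys);
             *     const KPhysicalAddress back = KMemoryLayout::GetLinearPhysicalAddress(virt);
             *     MESOSPHERE_ASSERT(back == phys);
             */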

            static NOINLINE const KMemoryRegion *Find(KVirtualAddress address)  { return Find(address, GetVirtualMemoryRegionTree()); }
            static NOINLINE const KMemoryRegion *Find(KPhysicalAddress address) { return Find(address, GetPhysicalMemoryRegionTree()); }

            static NOINLINE const KMemoryRegion *FindLinear(KVirtualAddress address)  { return Find(address, GetVirtualLinearMemoryRegionTree()); }
            static NOINLINE const KMemoryRegion *FindLinear(KPhysicalAddress address) { return Find(address, GetPhysicalLinearMemoryRegionTree()); }

            static NOINLINE KVirtualAddress GetMainStackTopAddress(s32 core_id)      { return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscMainStack); }
            static NOINLINE KVirtualAddress GetIdleStackTopAddress(s32 core_id)      { return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscIdleStack); }
            static NOINLINE KVirtualAddress GetExceptionStackTopAddress(s32 core_id) { return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscExceptionStack); }

            static NOINLINE KVirtualAddress GetSlabRegionAddress() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelSlab)).GetAddress(); }

            static NOINLINE const KMemoryRegion &GetDeviceRegion(KMemoryRegionType type) { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(type)); }
            static KPhysicalAddress GetDevicePhysicalAddress(KMemoryRegionType type) { return GetDeviceRegion(type).GetAddress(); }
            static KVirtualAddress  GetDeviceVirtualAddress(KMemoryRegionType type)  { return GetDeviceRegion(type).GetPairAddress(); }

            static NOINLINE const KMemoryRegion &GetPoolManagementRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramPoolManagement)); }
            static NOINLINE const KMemoryRegion &GetPageTableHeapRegion()  { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramKernelPtHeap)); }
            static NOINLINE const KMemoryRegion &GetKernelStackRegion()    { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelStack)); }
            static NOINLINE const KMemoryRegion &GetTempRegion()           { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelTemp)); }
            static NOINLINE const KMemoryRegion &GetSlabRegion()           { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelSlab)); }

            static NOINLINE const KMemoryRegion &GetKernelTraceBufferRegion() { return Dereference(GetVirtualLinearMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramKernelTraceBuffer)); }

            static NOINLINE const KMemoryRegion &GetVirtualLinearRegion(KVirtualAddress address)   { return Dereference(FindLinear(address)); }
            static NOINLINE const KMemoryRegion &GetPhysicalLinearRegion(KPhysicalAddress address) { return Dereference(FindLinear(address)); }

            static NOINLINE const KMemoryRegion *GetPhysicalKernelTraceBufferRegion() { return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_KernelTraceBuffer); }
            static NOINLINE const KMemoryRegion *GetPhysicalOnMemoryBootImageRegion() { return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_OnMemoryBootImage); }
            static NOINLINE const KMemoryRegion *GetPhysicalDTBRegion()               { return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_DTB); }

            static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion *&region, KPhysicalAddress address) { return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionType_DramUserPool); }
            static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion *&region, KVirtualAddress address)   { return IsTypedAddress(region, address, GetVirtualLinearMemoryRegionTree(), KMemoryRegionType_VirtualDramUserPool); }

            static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion *&region, KPhysicalAddress address, size_t size) { return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionType_DramUserPool); }
            static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion *&region, KVirtualAddress address, size_t size)   { return IsTypedAddress(region, address, size, GetVirtualLinearMemoryRegionTree(), KMemoryRegionType_VirtualDramUserPool); }
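
            /* Example (illustrative sketch): callers hold a region cache across queries, so repeated
             * checks against the same user pool region skip the tree lookup (addr being some
             * KVirtualAddress under test):
             *
             *     const KMemoryRegion *cache = nullptr;
             *     const bool ok_first  = KMemoryLayout::IsHeapVirtualAddress(cache, addr);
             *     const bool ok_second = KMemoryLayout::IsHeapVirtualAddress(cache, addr + PageSize);
             */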

            static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion *&region, KPhysicalAddress address)              { return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), static_cast<KMemoryRegionType>(KMemoryRegionAttr_LinearMapped)); }
            static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion *&region, KPhysicalAddress address, size_t size) { return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), static_cast<KMemoryRegionType>(KMemoryRegionAttr_LinearMapped)); }
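
            /* Returns { total DRAM size, kernel-reserved DRAM size }: every Dram-derived region counts
             * toward the total, and regions not derived from the user pool count toward the kernel. */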
            static NOINLINE std::tuple<size_t, size_t> GetTotalAndKernelMemorySizes() {
                size_t total_size = 0, kernel_size = 0;
                for (const auto &region : GetPhysicalMemoryRegionTree()) {
                    if (region.IsDerivedFrom(KMemoryRegionType_Dram)) {
                        total_size += region.GetSize();
                        if (!region.IsDerivedFrom(KMemoryRegionType_DramUserPool)) {
                            kernel_size += region.GetSize();
                        }
                    }
                }
                return std::make_tuple(total_size, kernel_size);
            }

            static void InitializeLinearMemoryAddresses(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start) {
                /* Set static differences. */
                s_linear_phys_to_virt_diff = GetInteger(linear_virtual_start) - GetInteger(aligned_linear_phys_start);
                s_linear_virt_to_phys_diff = GetInteger(aligned_linear_phys_start) - GetInteger(linear_virtual_start);
            }
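
            /* The two differences are two's-complement negations of each other, so the unsigned
             * wraparound in GetLinearVirtualAddress/GetLinearPhysicalAddress is exact regardless of
             * which base address is numerically larger. */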

            static void InitializeLinearMemoryRegionTrees();

            static size_t GetResourceRegionSizeForInit();

            static NOINLINE auto GetKernelRegionExtents()      { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Kernel); }
            static NOINLINE auto GetKernelCodeRegionExtents()  { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelCode); }
            static NOINLINE auto GetKernelStackRegionExtents() { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelStack); }
            static NOINLINE auto GetKernelMiscRegionExtents()  { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelMisc); }
            static NOINLINE auto GetKernelSlabRegionExtents()  { return GetVirtualMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelSlab); }

            static NOINLINE auto GetLinearRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_LinearMapped); }

            static NOINLINE auto GetLinearRegionVirtualExtents() {
                const auto physical = GetLinearRegionPhysicalExtents();
                return KMemoryRegion(GetInteger(GetLinearVirtualAddress(physical.GetAddress())), GetInteger(GetLinearVirtualAddress(physical.GetLastAddress())), 0, KMemoryRegionType_None);
            }

            static NOINLINE auto GetMainMemoryPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Dram); }
            static NOINLINE auto GetCarveoutRegionExtents()     { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_CarveoutProtected); }

            static NOINLINE auto GetKernelRegionPhysicalExtents()              { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelBase); }
            static NOINLINE auto GetKernelCodeRegionPhysicalExtents()          { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelCode); }
            static NOINLINE auto GetKernelSlabRegionPhysicalExtents()          { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelSlab); }
            static NOINLINE auto GetKernelPageTableHeapRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelPtHeap); }
            static NOINLINE auto GetKernelInitPageTableRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelInitPt); }

            static NOINLINE auto GetKernelPoolManagementRegionPhysicalExtents()      { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramPoolManagement); }
            static NOINLINE auto GetKernelPoolPartitionRegionPhysicalExtents()       { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramPoolPartition); }
            static NOINLINE auto GetKernelSystemPoolRegionPhysicalExtents()          { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramSystemPool); }
            static NOINLINE auto GetKernelSystemNonSecurePoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramSystemNonSecurePool); }
            static NOINLINE auto GetKernelAppletPoolRegionPhysicalExtents()          { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramAppletPool); }
            static NOINLINE auto GetKernelApplicationPoolRegionPhysicalExtents()     { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramApplicationPool); }

            static NOINLINE auto GetKernelTraceBufferRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_KernelTraceBuffer); }
    };

    namespace init {

        /* These should be generic, regardless of board. */
        void SetupPoolPartitionMemoryRegions();

        /* These may be implemented in a board-specific manner. */
        void SetupDevicePhysicalMemoryRegions();
        void SetupDramPhysicalMemoryRegions();

    }

}