Mirror of https://github.com/Atmosphere-NX/Atmosphere (synced 2024-12-22 20:31:14 +00:00)
kern: revamp KMemoryRegionType to better encode derivation hierarchies
This commit is contained in:
parent d50c7c5c79
commit 1983f86875
17 changed files with 746 additions and 426 deletions
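The heart of this commit is a new consteval builder (impl::KMemoryRegionTypeValue, added later in this diff, evidently in kern_k_memory_region_type.hpp) that constructs every region type as a bit pattern: each Derive*() call keeps all of the parent's bits and sets new bits above the parent's cursor, so "is X derived from Y" later reduces to a subset-of-bits test (IsDerivedFrom / IsAncestorOf). The first three new-file hunks below appear to be the per-architecture (arm64 and arm) and per-board (Nintendo NX) device-type includes that the new header pulls in near its end. As a minimal sketch of how the architecture device values come out, worked from DeriveInitial/DeriveTransition/DeriveSparse as defined later in this diff (the trailing static_asserts are illustrative, not part of the commit, and assume placement after the new header's definitions):

/* Kernel         = None.DeriveInitial(0, 2)      -> sets bit 0 : value 0x1, cursor at bit 2 */
/* ArchDeviceBase = Kernel.DeriveTransition(0, 1) -> adds bit 2 : value 0x5, cursor at bit 3 */
/* DeriveSparse(0, 3, i) then adds the transition bit 3 plus one index bit (4 + i):          */
/*   Uart                  (i = 0): 0x5 | 0x8 | 0x10 = 0x1D                                  */
/*   InterruptCpuInterface (i = 1): 0x5 | 0x8 | 0x20 = 0x2D  (| KMemoryRegionAttr_NoUserMap) */
/*   InterruptDistributor  (i = 2): 0x5 | 0x8 | 0x40 = 0x4D  (| KMemoryRegionAttr_NoUserMap) */
static_assert(KMemoryRegionType_ArchDeviceBase.IsAncestorOf(KMemoryRegionType_Uart.GetValue()));
static_assert(KMemoryRegionType_Kernel.IsAncestorOf(KMemoryRegionType_InterruptDistributor.GetValue()));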
@@ -0,0 +1,25 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/* All architectures must define NumArchitectureDeviceRegions. */
constexpr inline const auto NumArchitectureDeviceRegions = 3;

constexpr inline const auto KMemoryRegionType_Uart                  = KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 0);
constexpr inline const auto KMemoryRegionType_InterruptCpuInterface = KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 1).SetAttribute(KMemoryRegionAttr_NoUserMap);
constexpr inline const auto KMemoryRegionType_InterruptDistributor  = KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 2).SetAttribute(KMemoryRegionAttr_NoUserMap);
static_assert(KMemoryRegionType_Uart                 .GetValue() == (0x1D));
static_assert(KMemoryRegionType_InterruptCpuInterface.GetValue() == (0x2D | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_InterruptDistributor .GetValue() == (0x4D | KMemoryRegionAttr_NoUserMap));
@@ -0,0 +1,25 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/* All architectures must define NumArchitectureDeviceRegions. */
constexpr inline const auto NumArchitectureDeviceRegions = 3;

constexpr inline const auto KMemoryRegionType_Uart                  = KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 0);
constexpr inline const auto KMemoryRegionType_InterruptCpuInterface = KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 1).SetAttribute(KMemoryRegionAttr_NoUserMap);
constexpr inline const auto KMemoryRegionType_InterruptDistributor  = KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 2).SetAttribute(KMemoryRegionAttr_NoUserMap);
static_assert(KMemoryRegionType_Uart                 .GetValue() == (0x1D));
static_assert(KMemoryRegionType_InterruptCpuInterface.GetValue() == (0x2D | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_InterruptDistributor .GetValue() == (0x4D | KMemoryRegionAttr_NoUserMap));
@@ -0,0 +1,27 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/* All architectures must define NumBoardDeviceRegions. */
constexpr inline const auto NumBoardDeviceRegions = 5;
/* UNUSED: .Derive(NumBoardDeviceRegions, 0); */
constexpr inline const auto KMemoryRegionType_MemoryController          = KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 1).SetAttribute(KMemoryRegionAttr_NoUserMap);
constexpr inline const auto KMemoryRegionType_MemoryController1         = KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 2).SetAttribute(KMemoryRegionAttr_NoUserMap);
constexpr inline const auto KMemoryRegionType_MemoryController0         = KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 3).SetAttribute(KMemoryRegionAttr_NoUserMap);
constexpr inline const auto KMemoryRegionType_PowerManagementController = KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 4).DeriveTransition();
static_assert(KMemoryRegionType_MemoryController         .GetValue() == (0x55 | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_MemoryController1        .GetValue() == (0x65 | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_MemoryController0        .GetValue() == (0x95 | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_PowerManagementController.GetValue() == (0x1A5));
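Worked values for the dense derivation used by the board regions above, a sketch based only on Derive()/DeriveTransition() as defined in the new type header later in this diff (the closing static_assert is illustrative, not part of the commit, and assumes placement after those definitions). BoardDeviceBase is 0x5 with its cursor at bit 4, and Derive(n, i) sets exactly two new bits: the i-th (low, high) pair with low < high, counted from the cursor.

/*   i = 1, pair (0, 2) -> bits 4 and 6: 0x5 | 0x10 | 0x40 = 0x55   (MemoryController)              */
/*   i = 2, pair (1, 2) -> bits 5 and 6: 0x5 | 0x20 | 0x40 = 0x65   (MemoryController1)             */
/*   i = 3, pair (0, 3) -> bits 4 and 7: 0x5 | 0x10 | 0x80 = 0x95   (MemoryController0)             */
/*   i = 4, pair (1, 3) -> bits 5 and 7: 0x5 | 0x20 | 0x80 = 0xA5, then DeriveTransition() adds     */
/*                         bit 8 -> 0x1A5                           (PowerManagementController)     */
static_assert(KMemoryRegionType_BoardDeviceBase.IsAncestorOf(KMemoryRegionType_PowerManagementController.GetValue()));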
@@ -16,9 +16,10 @@
#pragma once
#include <mesosphere/kern_common.hpp>
#include <mesosphere/init/kern_init_page_table_select.hpp>
#include <mesosphere/kern_k_memory_region.hpp>

#if defined(ATMOSPHERE_BOARD_NINTENDO_NX)
#include <mesosphere/board/nintendo/nx/kern_k_memory_layout.board.nintendo_nx.hpp>
#include <mesosphere/board/nintendo/nx/kern_k_memory_layout.hpp>
#else
#error "Unknown board for KMemoryLayout"
#endif
@@ -52,385 +53,6 @@ namespace ams::kern {
|
|||
|
||||
constexpr size_t KernelResourceSize = KernelPageTableHeapSize + KernelInitialPageHeapSize + KernelSlabHeapSize;
|
||||
|
||||
enum KMemoryRegionType : u32 {
|
||||
KMemoryRegionAttr_CarveoutProtected = 0x04000000,
|
||||
KMemoryRegionAttr_DidKernelMap = 0x08000000,
|
||||
KMemoryRegionAttr_ShouldKernelMap = 0x10000000,
|
||||
KMemoryRegionAttr_UserReadOnly = 0x20000000,
|
||||
KMemoryRegionAttr_NoUserMap = 0x40000000,
|
||||
KMemoryRegionAttr_LinearMapped = 0x80000000,
|
||||
|
||||
KMemoryRegionType_None = 0,
|
||||
KMemoryRegionType_Kernel = 1,
|
||||
KMemoryRegionType_Dram = 2,
|
||||
KMemoryRegionType_CoreLocal = 4,
|
||||
|
||||
KMemoryRegionType_VirtualKernelPtHeap = 0x2A,
|
||||
KMemoryRegionType_VirtualKernelTraceBuffer = 0x4A,
|
||||
KMemoryRegionType_VirtualKernelInitPt = 0x19A,
|
||||
|
||||
KMemoryRegionType_VirtualDramMetadataPool = 0x29A,
|
||||
KMemoryRegionType_VirtualDramManagedPool = 0x31A,
|
||||
KMemoryRegionType_VirtualDramApplicationPool = 0x271A,
|
||||
KMemoryRegionType_VirtualDramAppletPool = 0x1B1A,
|
||||
KMemoryRegionType_VirtualDramSystemPool = 0x2B1A,
|
||||
KMemoryRegionType_VirtualDramSystemNonSecurePool = 0x331A,
|
||||
|
||||
KMemoryRegionType_Uart = 0x1D,
|
||||
KMemoryRegionType_InterruptDistributor = 0x4D | KMemoryRegionAttr_NoUserMap,
|
||||
KMemoryRegionType_InterruptCpuInterface = 0x2D | KMemoryRegionAttr_NoUserMap,
|
||||
|
||||
KMemoryRegionType_MemoryController = 0x55,
|
||||
KMemoryRegionType_MemoryController0 = 0x95,
|
||||
KMemoryRegionType_MemoryController1 = 0x65,
|
||||
KMemoryRegionType_PowerManagementController = 0x1A5,
|
||||
|
||||
KMemoryRegionType_KernelAutoMap = KMemoryRegionType_Kernel | KMemoryRegionAttr_ShouldKernelMap,
|
||||
|
||||
KMemoryRegionType_KernelTemp = 0x31,
|
||||
|
||||
KMemoryRegionType_KernelCode = 0x19,
|
||||
KMemoryRegionType_KernelStack = 0x29,
|
||||
KMemoryRegionType_KernelMisc = 0x49,
|
||||
KMemoryRegionType_KernelSlab = 0x89,
|
||||
|
||||
KMemoryRegionType_KernelMiscMainStack = 0xB49,
|
||||
KMemoryRegionType_KernelMiscMappedDevice = 0xD49,
|
||||
KMemoryRegionType_KernelMiscIdleStack = 0x1349,
|
||||
KMemoryRegionType_KernelMiscUnknownDebug = 0x1549,
|
||||
KMemoryRegionType_KernelMiscExceptionStack = 0x2349,
|
||||
|
||||
KMemoryRegionType_DramLinearMapped = KMemoryRegionType_Dram | KMemoryRegionAttr_LinearMapped,
|
||||
|
||||
KMemoryRegionType_DramReservedEarly = 0x16 | KMemoryRegionAttr_NoUserMap,
|
||||
KMemoryRegionType_DramPoolPartition = 0x26 | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped,
|
||||
KMemoryRegionType_DramMetadataPool = 0x166 | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_CarveoutProtected,
|
||||
|
||||
KMemoryRegionType_DramNonKernel = 0x1A6 | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped,
|
||||
|
||||
KMemoryRegionType_DramApplicationPool = 0x7A6 | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped,
|
||||
KMemoryRegionType_DramAppletPool = 0xBA6 | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped,
|
||||
KMemoryRegionType_DramSystemNonSecurePool = 0xDA6 | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped,
|
||||
KMemoryRegionType_DramSystemPool = 0x13A6 | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_CarveoutProtected,
|
||||
|
||||
KMemoryRegionType_DramKernel = 0xE | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_CarveoutProtected,
|
||||
KMemoryRegionType_DramKernelCode = 0xCE | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_CarveoutProtected,
|
||||
KMemoryRegionType_DramKernelSlab = 0x14E | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_CarveoutProtected,
|
||||
KMemoryRegionType_DramKernelPtHeap = 0x24E | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_LinearMapped,
|
||||
KMemoryRegionType_DramKernelInitPt = 0x44E | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_LinearMapped,
|
||||
|
||||
/* These regions aren't normally mapped in retail kernel. */
|
||||
KMemoryRegionType_KernelTraceBuffer = 0xA6 | KMemoryRegionAttr_UserReadOnly | KMemoryRegionAttr_LinearMapped,
|
||||
KMemoryRegionType_OnMemoryBootImage = 0x156,
|
||||
KMemoryRegionType_DTB = 0x256,
|
||||
};
|
||||
|
||||
constexpr ALWAYS_INLINE KMemoryRegionType GetTypeForVirtualLinearMapping(u32 type_id) {
|
||||
if (type_id == (type_id | KMemoryRegionType_KernelTraceBuffer)) {
|
||||
return KMemoryRegionType_VirtualKernelTraceBuffer;
|
||||
} else if (type_id == (type_id | KMemoryRegionType_DramKernelPtHeap)) {
|
||||
return KMemoryRegionType_VirtualKernelPtHeap;
|
||||
} else {
|
||||
return KMemoryRegionType_Dram;
|
||||
}
|
||||
}
|
||||
|
||||
class KMemoryRegionTree;
|
||||
|
||||
class KMemoryRegion : public util::IntrusiveRedBlackTreeBaseNode<KMemoryRegion> {
|
||||
NON_COPYABLE(KMemoryRegion);
|
||||
NON_MOVEABLE(KMemoryRegion);
|
||||
private:
|
||||
friend class KMemoryRegionTree;
|
||||
private:
|
||||
uintptr_t address;
|
||||
uintptr_t pair_address;
|
||||
size_t region_size;
|
||||
u32 attributes;
|
||||
u32 type_id;
|
||||
public:
|
||||
static constexpr ALWAYS_INLINE int Compare(const KMemoryRegion &lhs, const KMemoryRegion &rhs) {
|
||||
if (lhs.GetAddress() < rhs.GetAddress()) {
|
||||
return -1;
|
||||
} else if (lhs.GetAddress() <= rhs.GetLastAddress()) {
|
||||
return 0;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
public:
|
||||
constexpr ALWAYS_INLINE KMemoryRegion() : address(0), pair_address(0), region_size(0), attributes(0), type_id(0) { /* ... */ }
|
||||
constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, uintptr_t p, u32 r, u32 t) :
|
||||
address(a), pair_address(p), region_size(rs), attributes(r), type_id(t)
|
||||
{
|
||||
/* ... */
|
||||
}
|
||||
constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, u32 r, u32 t) : KMemoryRegion(a, rs, std::numeric_limits<uintptr_t>::max(), r, t) { /* ... */ }
|
||||
private:
|
||||
constexpr ALWAYS_INLINE void Reset(uintptr_t a, uintptr_t rs, uintptr_t p, u32 r, u32 t) {
|
||||
this->address = a;
|
||||
this->pair_address = p;
|
||||
this->region_size = rs;
|
||||
this->attributes = r;
|
||||
this->type_id = t;
|
||||
}
|
||||
public:
|
||||
constexpr ALWAYS_INLINE uintptr_t GetAddress() const {
|
||||
return this->address;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetPairAddress() const {
|
||||
return this->pair_address;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE size_t GetSize() const {
|
||||
return this->region_size;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetEndAddress() const {
|
||||
return this->GetAddress() + this->GetSize();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetLastAddress() const {
|
||||
return this->GetEndAddress() - 1;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE u32 GetAttributes() const {
|
||||
return this->attributes;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE u32 GetType() const {
|
||||
return this->type_id;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE void SetType(u32 type) {
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(this->CanDerive(type));
|
||||
this->type_id = type;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool Contains(uintptr_t address) const {
|
||||
return this->GetAddress() <= address && address <= this->GetLastAddress();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool IsDerivedFrom(u32 type) const {
|
||||
return (this->GetType() | type) == this->GetType();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool HasTypeAttribute(KMemoryRegionType attr) const {
|
||||
return (this->GetType() | attr) == this->GetType();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool CanDerive(u32 type) const {
|
||||
return (this->GetType() | type) == type;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE void SetPairAddress(uintptr_t a) {
|
||||
this->pair_address = a;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE void SetTypeAttribute(KMemoryRegionType attr) {
|
||||
this->type_id |= attr;
|
||||
}
|
||||
};
|
||||
static_assert(std::is_trivially_destructible<KMemoryRegion>::value);
|
||||
|
||||
class KMemoryRegionTree {
|
||||
public:
|
||||
struct DerivedRegionExtents {
|
||||
const KMemoryRegion *first_region;
|
||||
const KMemoryRegion *last_region;
|
||||
|
||||
constexpr DerivedRegionExtents() : first_region(nullptr), last_region(nullptr) { /* ... */ }
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetAddress() const {
|
||||
return this->first_region->GetAddress();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetEndAddress() const {
|
||||
return this->last_region->GetEndAddress();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE size_t GetSize() const {
|
||||
return this->GetEndAddress() - this->GetAddress();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetLastAddress() const {
|
||||
return this->GetEndAddress() - 1;
|
||||
}
|
||||
};
|
||||
private:
|
||||
using TreeType = util::IntrusiveRedBlackTreeBaseTraits<KMemoryRegion>::TreeType<KMemoryRegion>;
|
||||
public:
|
||||
using value_type = TreeType::value_type;
|
||||
using size_type = TreeType::size_type;
|
||||
using difference_type = TreeType::difference_type;
|
||||
using pointer = TreeType::pointer;
|
||||
using const_pointer = TreeType::const_pointer;
|
||||
using reference = TreeType::reference;
|
||||
using const_reference = TreeType::const_reference;
|
||||
using iterator = TreeType::iterator;
|
||||
using const_iterator = TreeType::const_iterator;
|
||||
private:
|
||||
TreeType tree;
|
||||
public:
|
||||
constexpr ALWAYS_INLINE KMemoryRegionTree() : tree() { /* ... */ }
|
||||
public:
|
||||
KMemoryRegion *FindModifiable(uintptr_t address) {
|
||||
if (auto it = this->find(KMemoryRegion(address, 1, 0, 0)); it != this->end()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
const KMemoryRegion *Find(uintptr_t address) const {
|
||||
if (auto it = this->find(KMemoryRegion(address, 1, 0, 0)); it != this->cend()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindByType(u32 type_id) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); ++it) {
|
||||
if (it->GetType() == type_id) {
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindByTypeAndAttribute(u32 type_id, u32 attr) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); ++it) {
|
||||
if (it->GetType() == type_id && it->GetAttributes() == attr) {
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindFirstDerived(u32 type_id) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindLastDerived(u32 type_id) const {
|
||||
const KMemoryRegion *region = nullptr;
|
||||
for (auto it = this->begin(); it != this->end(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
region = std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return region;
|
||||
}
|
||||
|
||||
|
||||
DerivedRegionExtents GetDerivedRegionExtents(u32 type_id) const {
|
||||
DerivedRegionExtents extents;
|
||||
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(extents.first_region == nullptr);
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(extents.last_region == nullptr);
|
||||
|
||||
for (auto it = this->cbegin(); it != this->cend(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
if (extents.first_region == nullptr) {
|
||||
extents.first_region = std::addressof(*it);
|
||||
}
|
||||
extents.last_region = std::addressof(*it);
|
||||
}
|
||||
}
|
||||
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(extents.first_region != nullptr);
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(extents.last_region != nullptr);
|
||||
|
||||
return extents;
|
||||
}
|
||||
public:
|
||||
NOINLINE void InsertDirectly(uintptr_t address, size_t size, u32 attr = 0, u32 type_id = 0);
|
||||
NOINLINE bool Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr = 0, u32 old_attr = 0);
|
||||
|
||||
NOINLINE KVirtualAddress GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id);
|
||||
|
||||
ALWAYS_INLINE KVirtualAddress GetRandomAlignedRegionWithGuard(size_t size, size_t alignment, u32 type_id, size_t guard_size) {
|
||||
return this->GetRandomAlignedRegion(size + 2 * guard_size, alignment, type_id) + guard_size;
|
||||
}
|
||||
public:
|
||||
/* Iterator accessors. */
|
||||
iterator begin() {
|
||||
return this->tree.begin();
|
||||
}
|
||||
|
||||
const_iterator begin() const {
|
||||
return this->tree.begin();
|
||||
}
|
||||
|
||||
iterator end() {
|
||||
return this->tree.end();
|
||||
}
|
||||
|
||||
const_iterator end() const {
|
||||
return this->tree.end();
|
||||
}
|
||||
|
||||
const_iterator cbegin() const {
|
||||
return this->begin();
|
||||
}
|
||||
|
||||
const_iterator cend() const {
|
||||
return this->end();
|
||||
}
|
||||
|
||||
iterator iterator_to(reference ref) {
|
||||
return this->tree.iterator_to(ref);
|
||||
}
|
||||
|
||||
const_iterator iterator_to(const_reference ref) const {
|
||||
return this->tree.iterator_to(ref);
|
||||
}
|
||||
|
||||
/* Content management. */
|
||||
bool empty() const {
|
||||
return this->tree.empty();
|
||||
}
|
||||
|
||||
reference back() {
|
||||
return this->tree.back();
|
||||
}
|
||||
|
||||
const_reference back() const {
|
||||
return this->tree.back();
|
||||
}
|
||||
|
||||
reference front() {
|
||||
return this->tree.front();
|
||||
}
|
||||
|
||||
const_reference front() const {
|
||||
return this->tree.front();
|
||||
}
|
||||
|
||||
/* GCC over-eagerly inlines this operation. */
|
||||
NOINLINE iterator insert(reference ref) {
|
||||
return this->tree.insert(ref);
|
||||
}
|
||||
|
||||
NOINLINE iterator erase(iterator it) {
|
||||
return this->tree.erase(it);
|
||||
}
|
||||
|
||||
iterator find(const_reference ref) const {
|
||||
return this->tree.find(ref);
|
||||
}
|
||||
|
||||
iterator nfind(const_reference ref) const {
|
||||
return this->tree.nfind(ref);
|
||||
}
|
||||
};
|
||||
|
||||
class KMemoryLayout {
|
||||
private:
|
||||
static /* constinit */ inline uintptr_t s_linear_phys_to_virt_diff;
|
||||
|
@@ -512,21 +134,19 @@
|
|||
static NOINLINE KVirtualAddress GetExceptionStackTopAddress(s32 core_id) { return GetStackTopAddress(core_id, KMemoryRegionType_KernelMiscExceptionStack); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetSlabRegionAddress() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelSlab)).GetAddress(); }
|
||||
static NOINLINE KVirtualAddress GetCoreLocalRegionAddress() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_CoreLocal)).GetAddress(); }
|
||||
static NOINLINE KVirtualAddress GetCoreLocalRegionAddress() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_CoreLocalRegion)).GetAddress(); }
|
||||
|
||||
static NOINLINE KVirtualAddress GetInterruptDistributorAddress() { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_InterruptDistributor)).GetPairAddress(); }
|
||||
static NOINLINE KVirtualAddress GetInterruptCpuInterfaceAddress() { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_InterruptCpuInterface)).GetPairAddress(); }
|
||||
static NOINLINE KVirtualAddress GetUartAddress() { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_Uart)).GetPairAddress(); }
|
||||
static NOINLINE const KMemoryRegion &GetDeviceRegion(KMemoryRegionType type) { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(type)); }
|
||||
static KPhysicalAddress GetDevicePhysicalAddress(KMemoryRegionType type) { return GetDeviceRegion(type).GetAddress(); }
|
||||
static KVirtualAddress GetDeviceVirtualAddress(KMemoryRegionType type) { return GetDeviceRegion(type).GetPairAddress(); }
|
||||
|
||||
static NOINLINE const KMemoryRegion &GetMemoryControllerRegion() { return Dereference(GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_MemoryController)); }
|
||||
static NOINLINE const KMemoryRegion &GetPoolManagementRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_DramPoolManagement)); }
|
||||
static NOINLINE const KMemoryRegion &GetPageTableHeapRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramKernelPtHeap)); }
|
||||
static NOINLINE const KMemoryRegion &GetKernelStackRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelStack)); }
|
||||
static NOINLINE const KMemoryRegion &GetTempRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelTemp)); }
|
||||
static NOINLINE const KMemoryRegion &GetCoreLocalRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_CoreLocalRegion)); }
|
||||
|
||||
static NOINLINE const KMemoryRegion &GetMetadataPoolRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramMetadataPool)); }
|
||||
static NOINLINE const KMemoryRegion &GetPageTableHeapRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_VirtualKernelPtHeap)); }
|
||||
static NOINLINE const KMemoryRegion &GetKernelStackRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelStack)); }
|
||||
static NOINLINE const KMemoryRegion &GetTempRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_KernelTemp)); }
|
||||
static NOINLINE const KMemoryRegion &GetCoreLocalRegion() { return Dereference(GetVirtualMemoryRegionTree().FindByType(KMemoryRegionType_CoreLocal)); }
|
||||
|
||||
static NOINLINE const KMemoryRegion &GetKernelTraceBufferRegion() { return Dereference(GetVirtualLinearMemoryRegionTree().FindByType(KMemoryRegionType_VirtualKernelTraceBuffer)); }
|
||||
static NOINLINE const KMemoryRegion &GetKernelTraceBufferRegion() { return Dereference(GetVirtualLinearMemoryRegionTree().FindByType(KMemoryRegionType_VirtualDramKernelTraceBuffer)); }
|
||||
|
||||
static NOINLINE const KMemoryRegion &GetVirtualLinearRegion(KVirtualAddress address) { return Dereference(FindLinear(address)); }
|
||||
|
||||
|
@@ -534,21 +154,21 @@
|
|||
static NOINLINE const KMemoryRegion *GetPhysicalOnMemoryBootImageRegion() { return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_OnMemoryBootImage); }
|
||||
static NOINLINE const KMemoryRegion *GetPhysicalDTBRegion() { return GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_DTB); }
|
||||
|
||||
static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address) { return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionType_DramNonKernel); }
|
||||
static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion *®ion, KVirtualAddress address) { return IsTypedAddress(region, address, GetVirtualLinearMemoryRegionTree(), KMemoryRegionType_VirtualDramManagedPool); }
|
||||
static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address) { return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionType_DramUserPool); }
|
||||
static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion *®ion, KVirtualAddress address) { return IsTypedAddress(region, address, GetVirtualLinearMemoryRegionTree(), KMemoryRegionType_VirtualDramUserPool); }
|
||||
|
||||
static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address, size_t size) { return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionType_DramNonKernel); }
|
||||
static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion *®ion, KVirtualAddress address, size_t size) { return IsTypedAddress(region, address, size, GetVirtualLinearMemoryRegionTree(), KMemoryRegionType_VirtualDramManagedPool); }
|
||||
static NOINLINE bool IsHeapPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address, size_t size) { return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionType_DramUserPool); }
|
||||
static NOINLINE bool IsHeapVirtualAddress(const KMemoryRegion *®ion, KVirtualAddress address, size_t size) { return IsTypedAddress(region, address, size, GetVirtualLinearMemoryRegionTree(), KMemoryRegionType_VirtualDramUserPool); }
|
||||
|
||||
static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address) { return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionAttr_LinearMapped); }
|
||||
static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address, size_t size) { return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), KMemoryRegionAttr_LinearMapped); }
|
||||
static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address) { return IsTypedAddress(region, address, GetPhysicalLinearMemoryRegionTree(), static_cast<KMemoryRegionType>(KMemoryRegionAttr_LinearMapped)); }
|
||||
static NOINLINE bool IsLinearMappedPhysicalAddress(const KMemoryRegion *®ion, KPhysicalAddress address, size_t size) { return IsTypedAddress(region, address, size, GetPhysicalLinearMemoryRegionTree(), static_cast<KMemoryRegionType>(KMemoryRegionAttr_LinearMapped)); }
|
||||
|
||||
static NOINLINE std::tuple<size_t, size_t> GetTotalAndKernelMemorySizes() {
|
||||
size_t total_size = 0, kernel_size = 0;
|
||||
for (const auto ®ion : GetPhysicalMemoryRegionTree()) {
|
||||
if (region.IsDerivedFrom(KMemoryRegionType_Dram)) {
|
||||
total_size += region.GetSize();
|
||||
if (!region.IsDerivedFrom(KMemoryRegionType_DramNonKernel)) {
|
||||
if (!region.IsDerivedFrom(KMemoryRegionType_DramUserPool)) {
|
||||
kernel_size += region.GetSize();
|
||||
}
|
||||
}
|
||||
|
@@ -576,14 +196,14 @@
|
|||
static NOINLINE auto GetMainMemoryPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_Dram); }
|
||||
static NOINLINE auto GetCarveoutRegionExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionAttr_CarveoutProtected); }
|
||||
|
||||
static NOINLINE auto GetKernelRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernel); }
|
||||
static NOINLINE auto GetKernelRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelBase); }
|
||||
static NOINLINE auto GetKernelCodeRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelCode); }
|
||||
static NOINLINE auto GetKernelSlabRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelSlab); }
|
||||
static NOINLINE auto GetKernelPageTableHeapRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelPtHeap); }
|
||||
static NOINLINE auto GetKernelInitPageTableRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramKernelInitPt); }
|
||||
|
||||
static NOINLINE auto GetKernelPoolManagementRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramPoolManagement); }
|
||||
static NOINLINE auto GetKernelPoolPartitionRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramPoolPartition); }
|
||||
static NOINLINE auto GetKernelMetadataPoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramMetadataPool); }
|
||||
static NOINLINE auto GetKernelSystemPoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramSystemPool); }
|
||||
static NOINLINE auto GetKernelSystemNonSecurePoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramSystemNonSecurePool); }
|
||||
static NOINLINE auto GetKernelAppletPoolRegionPhysicalExtents() { return GetPhysicalMemoryRegionTree().GetDerivedRegionExtents(KMemoryRegionType_DramAppletPool); }
|
||||
|
|
|
@@ -0,0 +1,318 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <mesosphere/kern_common.hpp>
|
||||
#include <mesosphere/kern_k_memory_region_type.hpp>
|
||||
|
||||
namespace ams::kern {
|
||||
|
||||
class KMemoryRegionTree;
|
||||
|
||||
class KMemoryRegion : public util::IntrusiveRedBlackTreeBaseNode<KMemoryRegion> {
|
||||
NON_COPYABLE(KMemoryRegion);
|
||||
NON_MOVEABLE(KMemoryRegion);
|
||||
private:
|
||||
friend class KMemoryRegionTree;
|
||||
private:
|
||||
uintptr_t address;
|
||||
uintptr_t pair_address;
|
||||
size_t region_size;
|
||||
u32 attributes;
|
||||
u32 type_id;
|
||||
public:
|
||||
static constexpr ALWAYS_INLINE int Compare(const KMemoryRegion &lhs, const KMemoryRegion &rhs) {
|
||||
if (lhs.GetAddress() < rhs.GetAddress()) {
|
||||
return -1;
|
||||
} else if (lhs.GetAddress() <= rhs.GetLastAddress()) {
|
||||
return 0;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
public:
|
||||
constexpr ALWAYS_INLINE KMemoryRegion() : address(0), pair_address(0), region_size(0), attributes(0), type_id(0) { /* ... */ }
|
||||
constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, uintptr_t p, u32 r, u32 t) :
|
||||
address(a), pair_address(p), region_size(rs), attributes(r), type_id(t)
|
||||
{
|
||||
/* ... */
|
||||
}
|
||||
constexpr ALWAYS_INLINE KMemoryRegion(uintptr_t a, size_t rs, u32 r, u32 t) : KMemoryRegion(a, rs, std::numeric_limits<uintptr_t>::max(), r, t) { /* ... */ }
|
||||
private:
|
||||
constexpr ALWAYS_INLINE void Reset(uintptr_t a, uintptr_t rs, uintptr_t p, u32 r, u32 t) {
|
||||
this->address = a;
|
||||
this->pair_address = p;
|
||||
this->region_size = rs;
|
||||
this->attributes = r;
|
||||
this->type_id = t;
|
||||
}
|
||||
public:
|
||||
constexpr ALWAYS_INLINE uintptr_t GetAddress() const {
|
||||
return this->address;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetPairAddress() const {
|
||||
return this->pair_address;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE size_t GetSize() const {
|
||||
return this->region_size;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetEndAddress() const {
|
||||
return this->GetAddress() + this->GetSize();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetLastAddress() const {
|
||||
return this->GetEndAddress() - 1;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE u32 GetAttributes() const {
|
||||
return this->attributes;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE u32 GetType() const {
|
||||
return this->type_id;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE void SetType(u32 type) {
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(this->CanDerive(type));
|
||||
this->type_id = type;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool Contains(uintptr_t address) const {
|
||||
return this->GetAddress() <= address && address <= this->GetLastAddress();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool IsDerivedFrom(u32 type) const {
|
||||
return (this->GetType() | type) == this->GetType();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool HasTypeAttribute(KMemoryRegionAttr attr) const {
|
||||
return (this->GetType() | attr) == this->GetType();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool CanDerive(u32 type) const {
|
||||
return (this->GetType() | type) == type;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE void SetPairAddress(uintptr_t a) {
|
||||
this->pair_address = a;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE void SetTypeAttribute(KMemoryRegionAttr attr) {
|
||||
this->type_id |= attr;
|
||||
}
|
||||
};
|
||||
static_assert(std::is_trivially_destructible<KMemoryRegion>::value);
|
||||
|
||||
class KMemoryRegionTree {
|
||||
public:
|
||||
struct DerivedRegionExtents {
|
||||
const KMemoryRegion *first_region;
|
||||
const KMemoryRegion *last_region;
|
||||
|
||||
constexpr DerivedRegionExtents() : first_region(nullptr), last_region(nullptr) { /* ... */ }
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetAddress() const {
|
||||
return this->first_region->GetAddress();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetEndAddress() const {
|
||||
return this->last_region->GetEndAddress();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE size_t GetSize() const {
|
||||
return this->GetEndAddress() - this->GetAddress();
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE uintptr_t GetLastAddress() const {
|
||||
return this->GetEndAddress() - 1;
|
||||
}
|
||||
};
|
||||
private:
|
||||
using TreeType = util::IntrusiveRedBlackTreeBaseTraits<KMemoryRegion>::TreeType<KMemoryRegion>;
|
||||
public:
|
||||
using value_type = TreeType::value_type;
|
||||
using size_type = TreeType::size_type;
|
||||
using difference_type = TreeType::difference_type;
|
||||
using pointer = TreeType::pointer;
|
||||
using const_pointer = TreeType::const_pointer;
|
||||
using reference = TreeType::reference;
|
||||
using const_reference = TreeType::const_reference;
|
||||
using iterator = TreeType::iterator;
|
||||
using const_iterator = TreeType::const_iterator;
|
||||
private:
|
||||
TreeType tree;
|
||||
public:
|
||||
constexpr ALWAYS_INLINE KMemoryRegionTree() : tree() { /* ... */ }
|
||||
public:
|
||||
KMemoryRegion *FindModifiable(uintptr_t address) {
|
||||
if (auto it = this->find(KMemoryRegion(address, 1, 0, 0)); it != this->end()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
const KMemoryRegion *Find(uintptr_t address) const {
|
||||
if (auto it = this->find(KMemoryRegion(address, 1, 0, 0)); it != this->cend()) {
|
||||
return std::addressof(*it);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindByType(u32 type_id) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); ++it) {
|
||||
if (it->GetType() == type_id) {
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindByTypeAndAttribute(u32 type_id, u32 attr) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); ++it) {
|
||||
if (it->GetType() == type_id && it->GetAttributes() == attr) {
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindFirstDerived(u32 type_id) const {
|
||||
for (auto it = this->cbegin(); it != this->cend(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
return std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
const KMemoryRegion *FindLastDerived(u32 type_id) const {
|
||||
const KMemoryRegion *region = nullptr;
|
||||
for (auto it = this->begin(); it != this->end(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
region = std::addressof(*it);
|
||||
}
|
||||
}
|
||||
return region;
|
||||
}
|
||||
|
||||
|
||||
DerivedRegionExtents GetDerivedRegionExtents(u32 type_id) const {
|
||||
DerivedRegionExtents extents;
|
||||
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(extents.first_region == nullptr);
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(extents.last_region == nullptr);
|
||||
|
||||
for (auto it = this->cbegin(); it != this->cend(); it++) {
|
||||
if (it->IsDerivedFrom(type_id)) {
|
||||
if (extents.first_region == nullptr) {
|
||||
extents.first_region = std::addressof(*it);
|
||||
}
|
||||
extents.last_region = std::addressof(*it);
|
||||
}
|
||||
}
|
||||
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(extents.first_region != nullptr);
|
||||
MESOSPHERE_INIT_ABORT_UNLESS(extents.last_region != nullptr);
|
||||
|
||||
return extents;
|
||||
}
|
||||
public:
|
||||
NOINLINE void InsertDirectly(uintptr_t address, size_t size, u32 attr = 0, u32 type_id = 0);
|
||||
NOINLINE bool Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr = 0, u32 old_attr = 0);
|
||||
|
||||
NOINLINE KVirtualAddress GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id);
|
||||
|
||||
ALWAYS_INLINE KVirtualAddress GetRandomAlignedRegionWithGuard(size_t size, size_t alignment, u32 type_id, size_t guard_size) {
|
||||
return this->GetRandomAlignedRegion(size + 2 * guard_size, alignment, type_id) + guard_size;
|
||||
}
|
||||
public:
|
||||
/* Iterator accessors. */
|
||||
iterator begin() {
|
||||
return this->tree.begin();
|
||||
}
|
||||
|
||||
const_iterator begin() const {
|
||||
return this->tree.begin();
|
||||
}
|
||||
|
||||
iterator end() {
|
||||
return this->tree.end();
|
||||
}
|
||||
|
||||
const_iterator end() const {
|
||||
return this->tree.end();
|
||||
}
|
||||
|
||||
const_iterator cbegin() const {
|
||||
return this->begin();
|
||||
}
|
||||
|
||||
const_iterator cend() const {
|
||||
return this->end();
|
||||
}
|
||||
|
||||
iterator iterator_to(reference ref) {
|
||||
return this->tree.iterator_to(ref);
|
||||
}
|
||||
|
||||
const_iterator iterator_to(const_reference ref) const {
|
||||
return this->tree.iterator_to(ref);
|
||||
}
|
||||
|
||||
/* Content management. */
|
||||
bool empty() const {
|
||||
return this->tree.empty();
|
||||
}
|
||||
|
||||
reference back() {
|
||||
return this->tree.back();
|
||||
}
|
||||
|
||||
const_reference back() const {
|
||||
return this->tree.back();
|
||||
}
|
||||
|
||||
reference front() {
|
||||
return this->tree.front();
|
||||
}
|
||||
|
||||
const_reference front() const {
|
||||
return this->tree.front();
|
||||
}
|
||||
|
||||
/* GCC over-eagerly inlines this operation. */
|
||||
NOINLINE iterator insert(reference ref) {
|
||||
return this->tree.insert(ref);
|
||||
}
|
||||
|
||||
NOINLINE iterator erase(iterator it) {
|
||||
return this->tree.erase(it);
|
||||
}
|
||||
|
||||
iterator find(const_reference ref) const {
|
||||
return this->tree.find(ref);
|
||||
}
|
||||
|
||||
iterator nfind(const_reference ref) const {
|
||||
return this->tree.nfind(ref);
|
||||
}
|
||||
};
|
||||
|
||||
}
|
|
@@ -0,0 +1,300 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <mesosphere/kern_common.hpp>
|
||||
|
||||
namespace ams::kern {
|
||||
|
||||
enum KMemoryRegionType : u32 {};
|
||||
|
||||
enum KMemoryRegionAttr : typename std::underlying_type<KMemoryRegionType>::type {
|
||||
KMemoryRegionAttr_CarveoutProtected = 0x04000000,
|
||||
KMemoryRegionAttr_DidKernelMap = 0x08000000,
|
||||
KMemoryRegionAttr_ShouldKernelMap = 0x10000000,
|
||||
KMemoryRegionAttr_UserReadOnly = 0x20000000,
|
||||
KMemoryRegionAttr_NoUserMap = 0x40000000,
|
||||
KMemoryRegionAttr_LinearMapped = 0x80000000,
|
||||
};
|
||||
|
||||
namespace impl {
|
||||
|
||||
consteval size_t BitsForDeriveSparse(size_t n) {
|
||||
return n + 1;
|
||||
}
|
||||
|
||||
consteval size_t BitsForDeriveDense(size_t n) {
|
||||
AMS_ASSUME(n > 0);
|
||||
|
||||
size_t low = 0, high = 1;
|
||||
for (size_t i = 0; i < n - 1; ++i) {
|
||||
if ((++low) == high) {
|
||||
++high;
|
||||
low = 0;
|
||||
}
|
||||
}
|
||||
return high + 1;
|
||||
}
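/* Editorial illustration (not part of the commit): BitsForDeriveDense(n) walks the same        */
/* (low, high) pair sequence as Derive() below and returns the smallest bit count b with        */
/* b*(b-1)/2 >= n, i.e. enough bits to give each of n siblings its own two-bit combination.     */
/* Assuming these asserts sit inside this namespace, the first few values are:                  */
static_assert(BitsForDeriveDense(1) == 2);
static_assert(BitsForDeriveDense(3) == 3);
static_assert(BitsForDeriveDense(5) == 4);
static_assert(BitsForDeriveDense(7) == 5);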
|
||||
|
||||
class KMemoryRegionTypeValue {
|
||||
private:
|
||||
using ValueType = typename std::underlying_type<KMemoryRegionType>::type;
|
||||
private:
|
||||
ValueType value;
|
||||
size_t next_bit;
|
||||
bool finalized;
|
||||
bool sparse_only;
|
||||
bool dense_only;
|
||||
private:
|
||||
consteval KMemoryRegionTypeValue(ValueType v) : value(v), next_bit(0), finalized(false), sparse_only(false), dense_only(false) { /* ... */ }
|
||||
public:
|
||||
consteval KMemoryRegionTypeValue() : KMemoryRegionTypeValue(0) { /* ... */ }
|
||||
|
||||
consteval operator KMemoryRegionType() const { return static_cast<KMemoryRegionType>(this->value); }
|
||||
consteval ValueType GetValue() const { return this->value; }
|
||||
|
||||
consteval const KMemoryRegionTypeValue &Finalize() { this->finalized = true; return *this; }
|
||||
consteval const KMemoryRegionTypeValue &SetSparseOnly() { this->sparse_only = true; return *this; }
|
||||
consteval const KMemoryRegionTypeValue &SetDenseOnly() { this->dense_only = true; return *this; }
|
||||
|
||||
consteval KMemoryRegionTypeValue &SetAttribute(KMemoryRegionAttr attr) { AMS_ASSUME(!this->finalized); this->value |= attr; return *this; }
|
||||
|
||||
consteval KMemoryRegionTypeValue DeriveInitial(size_t i, size_t next = BITSIZEOF(ValueType)) const {
|
||||
AMS_ASSUME(!this->finalized);
|
||||
AMS_ASSUME(!this->value);
|
||||
AMS_ASSUME(!this->next_bit);
|
||||
AMS_ASSUME(next > i);
|
||||
|
||||
KMemoryRegionTypeValue new_type = *this;
|
||||
new_type.value = (ValueType{1} << i);
|
||||
new_type.next_bit = next;
|
||||
return new_type;
|
||||
}
|
||||
|
||||
consteval KMemoryRegionTypeValue DeriveAttribute(KMemoryRegionAttr attr) const {
|
||||
AMS_ASSUME(!this->finalized);
|
||||
|
||||
KMemoryRegionTypeValue new_type = *this;
|
||||
new_type.value |= attr;
|
||||
return new_type;
|
||||
}
|
||||
|
||||
consteval KMemoryRegionTypeValue DeriveTransition(size_t ofs = 0, size_t adv = 1) const {
|
||||
AMS_ASSUME(!this->finalized);
|
||||
AMS_ASSUME(ofs < adv);
|
||||
AMS_ASSUME(this->next_bit + adv <= BITSIZEOF(ValueType));
|
||||
|
||||
KMemoryRegionTypeValue new_type = *this;
|
||||
new_type.value |= (ValueType{1} << (this->next_bit + ofs));
|
||||
new_type.next_bit += adv;
|
||||
return new_type;
|
||||
}
|
||||
|
||||
consteval KMemoryRegionTypeValue DeriveSparse(size_t ofs, size_t n, size_t i) const {
|
||||
AMS_ASSUME(!this->finalized);
|
||||
AMS_ASSUME(!this->dense_only);
|
||||
AMS_ASSUME(this->next_bit + ofs + n + 1 <= BITSIZEOF(ValueType));
|
||||
AMS_ASSUME(i < n);
|
||||
|
||||
KMemoryRegionTypeValue new_type = *this;
|
||||
new_type.value |= (ValueType{1} << (this->next_bit + ofs));
|
||||
new_type.value |= (ValueType{1} << (this->next_bit + ofs + 1 + i));
|
||||
new_type.next_bit += ofs + n + 1;
|
||||
return new_type;
|
||||
}
|
||||
|
||||
consteval KMemoryRegionTypeValue Derive(size_t n, size_t i) const {
|
||||
AMS_ASSUME(!this->finalized);
|
||||
AMS_ASSUME(!this->sparse_only);
|
||||
AMS_ASSUME(this->next_bit + BitsForDeriveDense(n) <= BITSIZEOF(ValueType));
|
||||
AMS_ASSUME(i < n);
|
||||
|
||||
size_t low = 0, high = 1;
|
||||
for (size_t j = 0; j < i; ++j) {
|
||||
if ((++low) == high) {
|
||||
++high;
|
||||
low = 0;
|
||||
}
|
||||
}
|
||||
AMS_ASSUME(high < BitsForDeriveDense(n));
|
||||
|
||||
|
||||
KMemoryRegionTypeValue new_type = *this;
|
||||
new_type.value |= (ValueType{1} << (this->next_bit + low));
|
||||
new_type.value |= (ValueType{1} << (this->next_bit + high));
|
||||
new_type.next_bit += BitsForDeriveDense(n);
|
||||
return new_type;
|
||||
}
|
||||
|
||||
consteval KMemoryRegionTypeValue Advance(size_t n) const {
|
||||
AMS_ASSUME(!this->finalized);
|
||||
AMS_ASSUME(this->next_bit + n <= BITSIZEOF(ValueType));
|
||||
|
||||
KMemoryRegionTypeValue new_type = *this;
|
||||
new_type.next_bit += n;
|
||||
return new_type;
|
||||
}
|
||||
|
||||
constexpr ALWAYS_INLINE bool IsAncestorOf(ValueType v) const {
|
||||
return (this->value | v) == v;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_None = impl::KMemoryRegionTypeValue();
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_Kernel = KMemoryRegionType_None.DeriveInitial(0, 2);
|
||||
constexpr inline const auto KMemoryRegionType_Dram = KMemoryRegionType_None.DeriveInitial(1, 2);
|
||||
constexpr inline const auto KMemoryRegionType_CoreLocalRegion = KMemoryRegionType_None.DeriveInitial(2).Finalize();
|
||||
static_assert(KMemoryRegionType_Kernel .GetValue() == 0x1);
|
||||
static_assert(KMemoryRegionType_Dram .GetValue() == 0x2);
|
||||
static_assert(KMemoryRegionType_CoreLocalRegion.GetValue() == 0x4);
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_DramKernelBase = KMemoryRegionType_Dram.DeriveSparse(0, 3, 0).SetAttribute(KMemoryRegionAttr_NoUserMap).SetAttribute(KMemoryRegionAttr_CarveoutProtected);
|
||||
constexpr inline const auto KMemoryRegionType_DramReservedBase = KMemoryRegionType_Dram.DeriveSparse(0, 3, 1);
|
||||
constexpr inline const auto KMemoryRegionType_DramHeapBase = KMemoryRegionType_Dram.DeriveSparse(0, 3, 2).SetAttribute(KMemoryRegionAttr_LinearMapped);
|
||||
static_assert(KMemoryRegionType_DramKernelBase .GetValue() == (0xE | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap));
|
||||
static_assert(KMemoryRegionType_DramReservedBase.GetValue() == (0x16));
|
||||
static_assert(KMemoryRegionType_DramHeapBase .GetValue() == (0x26 | KMemoryRegionAttr_LinearMapped));
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_DramKernelCode = KMemoryRegionType_DramKernelBase.DeriveSparse(0, 4, 0);
|
||||
constexpr inline const auto KMemoryRegionType_DramKernelSlab = KMemoryRegionType_DramKernelBase.DeriveSparse(0, 4, 1);
|
||||
constexpr inline const auto KMemoryRegionType_DramKernelPtHeap = KMemoryRegionType_DramKernelBase.DeriveSparse(0, 4, 2).SetAttribute(KMemoryRegionAttr_LinearMapped);
|
||||
constexpr inline const auto KMemoryRegionType_DramKernelInitPt = KMemoryRegionType_DramKernelBase.DeriveSparse(0, 4, 3).SetAttribute(KMemoryRegionAttr_LinearMapped);
|
||||
static_assert(KMemoryRegionType_DramKernelCode .GetValue() == (0xCE | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap));
|
||||
static_assert(KMemoryRegionType_DramKernelSlab .GetValue() == (0x14E | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap));
|
||||
static_assert(KMemoryRegionType_DramKernelPtHeap.GetValue() == (0x24E | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped));
|
||||
static_assert(KMemoryRegionType_DramKernelInitPt.GetValue() == (0x44E | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_LinearMapped));
|
||||
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_DramReservedEarly = KMemoryRegionType_DramReservedBase.DeriveAttribute(KMemoryRegionAttr_NoUserMap);
|
||||
static_assert(KMemoryRegionType_DramReservedEarly.GetValue() == (0x16 | KMemoryRegionAttr_NoUserMap));
|
||||
|
||||
/* UNUSED: DeriveSparse(0, 3, 0); */
|
||||
constexpr inline const auto KMemoryRegionType_OnMemoryBootImage = KMemoryRegionType_DramReservedBase.DeriveSparse(0, 3, 1);
|
||||
constexpr inline const auto KMemoryRegionType_DTB = KMemoryRegionType_DramReservedBase.DeriveSparse(0, 3, 2);
|
||||
static_assert(KMemoryRegionType_OnMemoryBootImage.GetValue() == 0x156);
|
||||
static_assert(KMemoryRegionType_DTB.GetValue() == 0x256);
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_KernelTraceBuffer = KMemoryRegionType_DramHeapBase.DeriveTransition(1, 3).SetAttribute(KMemoryRegionAttr_UserReadOnly);
|
||||
static_assert(KMemoryRegionType_KernelTraceBuffer.GetValue() == (0xA6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_UserReadOnly));
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_DramPoolPartition = KMemoryRegionType_DramHeapBase.DeriveAttribute(KMemoryRegionAttr_NoUserMap);
|
||||
static_assert(KMemoryRegionType_DramPoolPartition.GetValue() == (0x26 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_DramPoolManagement = KMemoryRegionType_DramPoolPartition.DeriveTransition(0, 2).DeriveTransition();
|
||||
constexpr inline const auto KMemoryRegionType_DramUserPool = KMemoryRegionType_DramPoolPartition.DeriveTransition(1, 2).DeriveTransition();
|
||||
static_assert(KMemoryRegionType_DramPoolManagement.GetValue() == (0x166 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
|
||||
static_assert(KMemoryRegionType_DramUserPool.GetValue() == (0x1A6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_DramApplicationPool = KMemoryRegionType_DramUserPool.Derive(4, 0);
|
||||
constexpr inline const auto KMemoryRegionType_DramAppletPool = KMemoryRegionType_DramUserPool.Derive(4, 1);
|
||||
constexpr inline const auto KMemoryRegionType_DramSystemNonSecurePool = KMemoryRegionType_DramUserPool.Derive(4, 2);
|
||||
constexpr inline const auto KMemoryRegionType_DramSystemPool = KMemoryRegionType_DramUserPool.Derive(4, 3).SetAttribute(KMemoryRegionAttr_CarveoutProtected);
|
||||
static_assert(KMemoryRegionType_DramApplicationPool .GetValue() == (0x7A6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
|
||||
static_assert(KMemoryRegionType_DramAppletPool .GetValue() == (0xBA6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
|
||||
static_assert(KMemoryRegionType_DramSystemNonSecurePool.GetValue() == (0xDA6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
|
||||
static_assert(KMemoryRegionType_DramSystemPool .GetValue() == (0x13A6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap | KMemoryRegionAttr_CarveoutProtected));
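/* Editorial illustration (not part of the commit): because every Derive*() keeps its parent's   */
/* bits, hierarchy membership is just a subset-of-bits test. For the DRAM pools above:           */
static_assert(KMemoryRegionType_DramUserPool.IsAncestorOf(KMemoryRegionType_DramApplicationPool.GetValue()));
static_assert(KMemoryRegionType_Dram.IsAncestorOf(KMemoryRegionType_DramSystemPool.GetValue()));
static_assert(!KMemoryRegionType_DramKernelBase.IsAncestorOf(KMemoryRegionType_DramApplicationPool.GetValue()));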
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramHeapBase = KMemoryRegionType_Dram.DeriveSparse(1, 3, 0);
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramKernelPtHeap = KMemoryRegionType_Dram.DeriveSparse(1, 3, 1);
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramKernelTraceBuffer = KMemoryRegionType_Dram.DeriveSparse(1, 3, 2);
|
||||
static_assert(KMemoryRegionType_VirtualDramHeapBase .GetValue() == 0x1A);
|
||||
static_assert(KMemoryRegionType_VirtualDramKernelPtHeap .GetValue() == 0x2A);
|
||||
static_assert(KMemoryRegionType_VirtualDramKernelTraceBuffer.GetValue() == 0x4A);
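/* Editorial note (not part of the commit): these virtual types are the targets of              */
/* GetTypeForVirtualLinearMapping(), defined at the end of this header: physical regions        */
/* derived from KernelTraceBuffer map to VirtualDramKernelTraceBuffer, regions derived from     */
/* DramKernelPtHeap map to VirtualDramKernelPtHeap, and everything else maps to plain Dram.     */
/* For example, the following would hold if placed after that function's definition:            */
static_assert(GetTypeForVirtualLinearMapping(KMemoryRegionType_DramKernelPtHeap.GetValue()) == KMemoryRegionType_VirtualDramKernelPtHeap);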
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramKernelInitPt = KMemoryRegionType_VirtualDramHeapBase.Derive(3, 0);
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramPoolManagement = KMemoryRegionType_VirtualDramHeapBase.Derive(3, 1);
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramUserPool = KMemoryRegionType_VirtualDramHeapBase.Derive(3, 2);
|
||||
static_assert(KMemoryRegionType_VirtualDramKernelInitPt .GetValue() == 0x19A);
|
||||
static_assert(KMemoryRegionType_VirtualDramPoolManagement.GetValue() == 0x29A);
|
||||
static_assert(KMemoryRegionType_VirtualDramUserPool .GetValue() == 0x31A);
|
||||
|
||||
/* NOTE: For unknown reason, the pools are derived out-of-order here. */
|
||||
/* It's worth eventually trying to understand why Nintendo made this choice. */
|
||||
/* UNUSED: .Derive(6, 0); */
|
||||
/* UNUSED: .Derive(6, 1); */
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramAppletPool = KMemoryRegionType_VirtualDramUserPool.Derive(6, 2);
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramApplicationPool = KMemoryRegionType_VirtualDramUserPool.Derive(6, 3);
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramSystemNonSecurePool = KMemoryRegionType_VirtualDramUserPool.Derive(6, 4);
|
||||
constexpr inline const auto KMemoryRegionType_VirtualDramSystemPool = KMemoryRegionType_VirtualDramUserPool.Derive(6, 5);
|
||||
static_assert(KMemoryRegionType_VirtualDramAppletPool .GetValue() == 0x1B1A);
|
||||
static_assert(KMemoryRegionType_VirtualDramApplicationPool .GetValue() == 0x271A);
|
||||
static_assert(KMemoryRegionType_VirtualDramSystemNonSecurePool.GetValue() == 0x2B1A);
|
||||
static_assert(KMemoryRegionType_VirtualDramSystemPool .GetValue() == 0x331A);
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_ArchDeviceBase = KMemoryRegionType_Kernel.DeriveTransition(0, 1).SetSparseOnly();
|
||||
constexpr inline const auto KMemoryRegionType_BoardDeviceBase = KMemoryRegionType_Kernel.DeriveTransition(0, 2).SetDenseOnly();
|
||||
static_assert(KMemoryRegionType_ArchDeviceBase .GetValue() == 0x5);
|
||||
static_assert(KMemoryRegionType_BoardDeviceBase.GetValue() == 0x5);
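/* Editorial note (not part of the commit): ArchDeviceBase and BoardDeviceBase collapse to the   */
/* same raw value 0x5, but their cursors differ (DeriveTransition(0, 1) vs DeriveTransition(0, 2))*/
/* and one is sparse-only while the other is dense-only. Every arch-derived child therefore sets */
/* bit 3 (e.g. Uart = 0x1D), while board-derived children only set bits from 4 upward            */
/* (e.g. MemoryController = 0x55), so the two sub-hierarchies stay distinguishable.              */
static_assert(KMemoryRegionType_ArchDeviceBase.GetValue() == KMemoryRegionType_BoardDeviceBase.GetValue());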
|
||||
|
||||
#if defined(ATMOSPHERE_ARCH_ARM64)
|
||||
#include <mesosphere/arch/arm64/kern_k_memory_region_device_types.inc>
|
||||
#elif defined(ATMOSPHERE_ARCH_ARM)
|
||||
#include <mesosphere/arch/arm/kern_k_memory_region_device_types.inc>
|
||||
#else
|
||||
/* Default to no architecture devices. */
|
||||
constexpr inline const auto NumArchitectureDeviceRegions = 0;
|
||||
#endif
|
||||
static_assert(NumArchitectureDeviceRegions >= 0);
|
||||
|
||||
#if defined(ATMOSPHERE_BOARD_NINTENDO_NX)
|
||||
#include <mesosphere/board/nintendo/nx/kern_k_memory_region_device_types.inc>
|
||||
#else
|
||||
/* Default to no board devices. */
|
||||
constexpr inline const auto NumBoardDeviceRegions = 0;
|
||||
#endif
|
||||
static_assert(NumBoardDeviceRegions >= 0);
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_KernelCode = KMemoryRegionType_Kernel.DeriveSparse(1, 4, 0);
|
||||
constexpr inline const auto KMemoryRegionType_KernelStack = KMemoryRegionType_Kernel.DeriveSparse(1, 4, 1);
|
||||
constexpr inline const auto KMemoryRegionType_KernelMisc = KMemoryRegionType_Kernel.DeriveSparse(1, 4, 2);
|
||||
constexpr inline const auto KMemoryRegionType_KernelSlab = KMemoryRegionType_Kernel.DeriveSparse(1, 4, 3);
|
||||
static_assert(KMemoryRegionType_KernelCode .GetValue() == 0x19);
|
||||
static_assert(KMemoryRegionType_KernelStack.GetValue() == 0x29);
|
||||
static_assert(KMemoryRegionType_KernelMisc .GetValue() == 0x49);
|
||||
static_assert(KMemoryRegionType_KernelSlab .GetValue() == 0x89);
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_KernelMiscDerivedBase = KMemoryRegionType_KernelMisc.DeriveTransition();
|
||||
static_assert(KMemoryRegionType_KernelMiscDerivedBase.GetValue() == 0x149);
|
||||
|
||||
/* UNUSED: .Derive(7, 0); */
|
||||
constexpr inline const auto KMemoryRegionType_KernelMiscMainStack = KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 1);
|
||||
constexpr inline const auto KMemoryRegionType_KernelMiscMappedDevice = KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 2);
|
||||
constexpr inline const auto KMemoryRegionType_KernelMiscIdleStack = KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 3);
|
||||
constexpr inline const auto KMemoryRegionType_KernelMiscUnknownDebug = KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 4);
|
||||
/* UNUSED: .Derive(7, 5); */
|
||||
constexpr inline const auto KMemoryRegionType_KernelMiscExceptionStack = KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 6);
|
||||
static_assert(KMemoryRegionType_KernelMiscMainStack .GetValue() == 0xB49);
|
||||
static_assert(KMemoryRegionType_KernelMiscMappedDevice .GetValue() == 0xD49);
|
||||
static_assert(KMemoryRegionType_KernelMiscIdleStack .GetValue() == 0x1349);
|
||||
static_assert(KMemoryRegionType_KernelMiscUnknownDebug .GetValue() == 0x1549);
|
||||
static_assert(KMemoryRegionType_KernelMiscExceptionStack.GetValue() == 0x2349);
|
||||
|
||||
constexpr inline const auto KMemoryRegionType_KernelTemp = KMemoryRegionType_Kernel.Advance(2).Derive(2, 0);
|
||||
static_assert(KMemoryRegionType_KernelTemp.GetValue() == 0x31);
|
||||
|
||||
constexpr ALWAYS_INLINE KMemoryRegionType GetTypeForVirtualLinearMapping(u32 type_id) {
|
||||
if (KMemoryRegionType_KernelTraceBuffer.IsAncestorOf(type_id)) {
|
||||
return KMemoryRegionType_VirtualDramKernelTraceBuffer;
|
||||
} else if (KMemoryRegionType_DramKernelPtHeap.IsAncestorOf(type_id)) {
|
||||
return KMemoryRegionType_VirtualDramKernelPtHeap;
|
||||
} else {
|
||||
return KMemoryRegionType_Dram;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
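
Before the per-file hunks below, one hedged observation about the encoding: judging purely from the asserted constants above (the KMemoryRegionType implementation itself is not part of this excerpt), each Derive/DeriveSparse step only ORs additional bits onto the parent's value, so an ancestor's encoding remains a bit-subset of every descendant's; checks like IsAncestorOf in GetTypeForVirtualLinearMapping presumably rely on exactly this. A minimal, self-contained check of that reading against the literal values asserted above; the helper name is invented for illustration and is not the Atmosphere API.

#include <cstdint>

/* Invented helper, not the Atmosphere API: "ancestor" as a bit-subset test. */
constexpr bool ContainsParentBits(uint32_t parent, uint32_t child) {
    return (child & parent) == parent;
}

/* Values copied verbatim from the static_asserts above. */
static_assert( ContainsParentBits(0x1A,  0x19A));  /* VirtualDramHeapBase -> VirtualDramKernelInitPt */
static_assert( ContainsParentBits(0x1A,  0x31A));  /* VirtualDramHeapBase -> VirtualDramUserPool */
static_assert( ContainsParentBits(0x31A, 0x1B1A)); /* VirtualDramUserPool -> VirtualDramAppletPool */
static_assert( ContainsParentBits(0x31A, 0x331A)); /* VirtualDramUserPool -> VirtualDramSystemPool */
static_assert(!ContainsParentBits(0x2A,  0x19A));  /* KernelPtHeap is not an ancestor of KernelInitPt */

int main() { return 0; }
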
@@ -39,8 +39,8 @@ namespace ams::kern::arch::arm {

    void KInterruptController::Initialize(s32 core_id) {
        /* Setup pointers to ARM mmio. */
-       this->gicd = GetPointer<volatile GicDistributor>(KMemoryLayout::GetInterruptDistributorAddress());
-       this->gicc = GetPointer<volatile GicCpuInterface>(KMemoryLayout::GetInterruptCpuInterfaceAddress());
+       this->gicd = GetPointer<volatile GicDistributor >(KMemoryLayout::GetDeviceVirtualAddress(KMemoryRegionType_InterruptDistributor));
+       this->gicc = GetPointer<volatile GicCpuInterface>(KMemoryLayout::GetDeviceVirtualAddress(KMemoryRegionType_InterruptCpuInterface));

        /* Clear CTLRs. */
        this->gicc->ctlr = 0;
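
Note on the change above: the dedicated GetInterruptDistributorAddress/GetInterruptCpuInterfaceAddress accessors are replaced by a single lookup keyed on the region type. The real KMemoryLayout::GetDeviceVirtualAddress is not shown in this diff; the following self-contained sketch only illustrates the presumed idea of "find the first region whose type derives from the requested one and return its base address". All names here are toy stand-ins, the type values are the (attribute-free) constants asserted in this commit, and the addresses are merely illustrative.

#include <array>
#include <cstddef>
#include <cstdint>

/* Toy stand-ins for illustration; none of these are the real kernel types. */
struct Region {
    uint32_t  type;
    uintptr_t address;
};

/* Bit-subset test standing in for IsDerivedFrom/IsAncestorOf. */
constexpr bool DerivesFrom(uint32_t region_type, uint32_t wanted) {
    return (region_type & wanted) == wanted;
}

/* Generic "device address by type" lookup over a flat region list. */
constexpr uintptr_t GetDeviceAddress(const std::array<Region, 3> &regions, uint32_t wanted) {
    for (std::size_t i = 0; i < regions.size(); ++i) {
        if (DerivesFrom(regions[i].type, wanted)) {
            return regions[i].address;
        }
    }
    return 0; /* not found */
}

/* Type values taken from the static_asserts in this commit (attribute bits omitted); addresses are illustrative. */
constexpr uint32_t Type_Uart        = 0x1D;
constexpr uint32_t Type_GicCpuIface = 0x2D;
constexpr uint32_t Type_GicDistrib  = 0x4D;

constexpr std::array<Region, 3> g_regions{{
    { Type_Uart,        0x70006000 },
    { Type_GicCpuIface, 0x50042000 },
    { Type_GicDistrib,  0x50041000 },
}};

static_assert(GetDeviceAddress(g_regions, Type_GicDistrib) == 0x50041000);
static_assert(GetDeviceAddress(g_regions, Type_Uart)       == 0x70006000);

int main() { return 0; }
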
@@ -405,7 +405,7 @@ namespace ams::kern::board::nintendo::nx {

    void KDevicePageTable::Initialize() {
        /* Set the memory controller register address. */
-       g_memory_controller_address = KMemoryLayout::GetMemoryControllerRegion().GetAddress();
+       g_memory_controller_address = KMemoryLayout::GetDevicePhysicalAddress(KMemoryRegionType_MemoryController);

        /* Allocate a page to use as a reserved/no device table. */
        const KVirtualAddress table_virt_addr = Kernel::GetPageTableManager().Allocate();
@@ -181,7 +181,7 @@ namespace ams::kern::board::nintendo::nx {
        /* Find the region for the address. */
        const KMemoryRegion *region = KMemoryLayout::Find(KPhysicalAddress(address));
        if (AMS_LIKELY(region != nullptr)) {
-           if (AMS_LIKELY(region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController))) {
+           if (AMS_LIKELY(region->IsDerivedFrom(KMemoryRegionType_MemoryController))) {
                /* Get the offset within the region. */
                const size_t offset = address - region->GetAddress();
                MESOSPHERE_ABORT_UNLESS(offset < region->GetSize());
@@ -206,9 +206,9 @@ namespace ams::kern::board::nintendo::nx {
            }

            /* Memory controller is allowed if the register is whitelisted. */
-           if (region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController ) ||
-               region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController0) ||
-               region->IsDerivedFrom(KMemoryRegionAttr_NoUserMap | KMemoryRegionType_MemoryController1))
+           if (region->IsDerivedFrom(KMemoryRegionType_MemoryController ) ||
+               region->IsDerivedFrom(KMemoryRegionType_MemoryController0) ||
+               region->IsDerivedFrom(KMemoryRegionType_MemoryController1))
            {
                /* Get the offset within the region. */
                const size_t offset = address - region->GetAddress();
@@ -50,7 +50,7 @@ namespace ams::kern {

    bool KDebugLogImpl::Initialize() {
        /* Set the uart register base address. */
-       g_uart_address = KMemoryLayout::GetUartAddress();
+       g_uart_address = KMemoryLayout::GetDeviceVirtualAddress(KMemoryRegionType_Uart);

        /* Parameters for uart. */
        constexpr u32 BaudRate = 115200;
@@ -27,9 +27,9 @@ namespace ams::kern {

    ALWAYS_INLINE bool SetupUartPhysicalMemoryRegion() {
#if defined(MESOSPHERE_DEBUG_LOG_USE_UART_A)
        return KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x70006000, 0x40, KMemoryRegionType_Uart | KMemoryRegionAttr_ShouldKernelMap);
#elif defined(MESOSPHERE_DEBUG_LOG_USE_UART_B)
        return KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x70006040, 0x40, KMemoryRegionType_Uart | KMemoryRegionAttr_ShouldKernelMap);
#elif defined(MESOSPHERE_DEBUG_LOG_USE_UART_C)
        return KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x70006200, 0x100, KMemoryRegionType_Uart | KMemoryRegionAttr_ShouldKernelMap);
#elif defined(MESOSPHERE_DEBUG_LOG_USE_UART_D)
@@ -41,10 +41,10 @@ namespace ams::kern {

    ALWAYS_INLINE bool SetupPowerManagementControllerMemoryRegion() {
        /* For backwards compatibility, the PMC must remain mappable on < 2.0.0. */
-       const auto pmc_type = GetTargetFirmware() >= TargetFirmware_2_0_0 ? (KMemoryRegionType_PowerManagementController | KMemoryRegionAttr_NoUserMap) : KMemoryRegionType_PowerManagementController;
+       const auto restrict_attr = GetTargetFirmware() >= TargetFirmware_2_0_0 ? KMemoryRegionAttr_NoUserMap : static_cast<KMemoryRegionAttr>(0);

-       return KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7000E000, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap) &&
-              KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7000E400, 0xC00, pmc_type);
+       return KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7000E000, 0x400, KMemoryRegionType_None | restrict_attr) &&
+              KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(0x7000E400, 0xC00, KMemoryRegionType_PowerManagementController | restrict_attr);
    }

    void InsertPoolPartitionRegionIntoBothTrees(size_t start, size_t size, KMemoryRegionType phys_type, KMemoryRegionType virt_type, u32 &cur_attr) {
@@ -95,7 +95,7 @@ namespace ams::kern {
        const size_t unsafe_system_pool_min_size = KSystemControl::Init::GetMinimumNonSecureSystemPoolSize();

        /* Find the start of the kernel DRAM region. */
-       const KMemoryRegion *kernel_dram_region = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_DramKernel);
+       const KMemoryRegion *kernel_dram_region = KMemoryLayout::GetPhysicalMemoryRegionTree().FindFirstDerived(KMemoryRegionType_DramKernelBase);
        MESOSPHERE_INIT_ABORT_UNLESS(kernel_dram_region != nullptr);

        const uintptr_t kernel_dram_start = kernel_dram_region->GetAddress();
@@ -141,7 +141,7 @@ namespace ams::kern {
        const uintptr_t metadata_pool_start = unsafe_system_pool_start - total_overhead_size;
        const size_t metadata_pool_size = total_overhead_size;
        u32 metadata_pool_attr = 0;
-       InsertPoolPartitionRegionIntoBothTrees(metadata_pool_start, metadata_pool_size, KMemoryRegionType_DramMetadataPool, KMemoryRegionType_VirtualDramMetadataPool, metadata_pool_attr);
+       InsertPoolPartitionRegionIntoBothTrees(metadata_pool_start, metadata_pool_size, KMemoryRegionType_DramPoolManagement, KMemoryRegionType_VirtualDramPoolManagement, metadata_pool_attr);

        /* Insert the system pool. */
        const uintptr_t system_pool_size = metadata_pool_start - pool_partitions_start;
@@ -234,7 +234,7 @@ namespace ams::kern {

        /* Get the virtual address of the core local region. */
        const KVirtualAddress core_local_virt_start = GetCoreLocalRegionVirtualAddress();
-       MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(core_local_virt_start), CoreLocalRegionSize, KMemoryRegionType_CoreLocal));
+       MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(core_local_virt_start), CoreLocalRegionSize, KMemoryRegionType_CoreLocalRegion));

        /* Allocate a page for each core. */
        KPhysicalAddress core_local_region_start_phys[cpu::NumCores] = {};
@@ -42,7 +42,7 @@ namespace ams::kern {
        const KMemoryRegion *region = nullptr;
        for (const auto &it : KMemoryLayout::GetVirtualMemoryRegionTree()) {
            /* We only care about regions that we need to create managers for. */
-           if (!it.IsDerivedFrom(KMemoryRegionType_VirtualDramManagedPool)) {
+           if (!it.IsDerivedFrom(KMemoryRegionType_VirtualDramUserPool)) {
                continue;
            }
@@ -64,7 +64,7 @@ namespace ams::kern {
        MESOSPHERE_ASSERT(region->GetAddress() != Null<decltype(region->GetAddress())>);
        MESOSPHERE_ASSERT(region->GetSize() > 0);
        MESOSPHERE_ASSERT(region->GetEndAddress() >= region->GetAddress());
-       MESOSPHERE_ASSERT(region->IsDerivedFrom(KMemoryRegionType_VirtualDramManagedPool));
+       MESOSPHERE_ASSERT(region->IsDerivedFrom(KMemoryRegionType_VirtualDramUserPool));
        MESOSPHERE_ASSERT(region->GetAttributes() == this->num_managers);

        /* Initialize a new manager for the region. */
@@ -153,7 +153,7 @@ namespace ams::kern {
        PrintMemoryRegion(" InitPageTable", KMemoryLayout::GetKernelInitPageTableRegionPhysicalExtents());
        PrintMemoryRegion(" MemoryPoolRegion", KMemoryLayout::GetKernelPoolPartitionRegionPhysicalExtents());
        PrintMemoryRegion(" System", KMemoryLayout::GetKernelSystemPoolRegionPhysicalExtents());
-       PrintMemoryRegion(" Internal", KMemoryLayout::GetKernelMetadataPoolRegionPhysicalExtents());
+       PrintMemoryRegion(" Management", KMemoryLayout::GetKernelPoolManagementRegionPhysicalExtents());
        PrintMemoryRegion(" SystemUnsafe", KMemoryLayout::GetKernelSystemNonSecurePoolRegionPhysicalExtents());
        PrintMemoryRegion(" Applet", KMemoryLayout::GetKernelAppletPoolRegionPhysicalExtents());
        PrintMemoryRegion(" Application", KMemoryLayout::GetKernelApplicationPoolRegionPhysicalExtents());
@@ -51,8 +51,8 @@ namespace ams::kern {

        /* Initialize the memory manager and the KPageBuffer slabheap. */
        {
-           const auto &metadata_region = KMemoryLayout::GetMetadataPoolRegion();
-           Kernel::GetMemoryManager().Initialize(metadata_region.GetAddress(), metadata_region.GetSize());
+           const auto &management_region = KMemoryLayout::GetPoolManagementRegion();
+           Kernel::GetMemoryManager().Initialize(management_region.GetAddress(), management_region.GetSize());
            init::InitializeKPageBufferSlabHeap();
        }
@@ -170,8 +170,13 @@ namespace ams::kern::init {

        /* Automatically map in devices that have auto-map attributes. */
        for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
-           /* We only care about automatically-mapped regions. */
-           if (!region.IsDerivedFrom(KMemoryRegionType_KernelAutoMap)) {
+           /* We only care about kernel regions. */
+           if (!region.IsDerivedFrom(KMemoryRegionType_Kernel)) {
                continue;
            }

+           /* Check whether we should map the region. */
+           if (!region.HasTypeAttribute(KMemoryRegionAttr_ShouldKernelMap)) {
+               continue;
+           }
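
Note on the change above: the single KMemoryRegionType_KernelAutoMap test is split into two independent conditions, derivation from the generic Kernel type plus the ShouldKernelMap attribute. A self-contained sketch of that two-predicate filter follows; the constants and the separate attr field are made up for illustration (in the real encoding, as the asserts earlier show, attribute bits are OR'd into the type value itself).

#include <cstdint>

/* Made-up constants; only the shape of the check matters. */
constexpr uint32_t Type_Kernel          = 0x1;
constexpr uint32_t Attr_ShouldKernelMap = 1u << 24;

struct Region {
    uint32_t type;
    uint32_t attr;
};

/* A region is auto-mapped only if it is kernel-derived AND flagged for mapping. */
constexpr bool ShouldAutoMap(const Region &r) {
    const bool is_kernel = (r.type & Type_Kernel) == Type_Kernel;
    const bool flagged   = (r.attr & Attr_ShouldKernelMap) != 0;
    return is_kernel && flagged;
}

static_assert( ShouldAutoMap({0x4D, Attr_ShouldKernelMap})); /* kernel-derived and flagged */
static_assert(!ShouldAutoMap({0x4D, 0}));                    /* kernel-derived, not flagged */
static_assert(!ShouldAutoMap({0x2,  Attr_ShouldKernelMap})); /* flagged, not kernel-derived */

int main() { return 0; }
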
@@ -301,11 +306,11 @@ namespace ams::kern::init {

        /* Insert regions for the initial page table region. */
        MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetPhysicalMemoryRegionTree().Insert(GetInteger(resource_end_phys_addr), init_page_table_region_size, KMemoryRegionType_DramKernelInitPt));
-       MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(resource_end_phys_addr) + linear_region_phys_to_virt_diff, init_page_table_region_size, KMemoryRegionType_VirtualKernelInitPt));
+       MESOSPHERE_INIT_ABORT_UNLESS(KMemoryLayout::GetVirtualMemoryRegionTree().Insert(GetInteger(resource_end_phys_addr) + linear_region_phys_to_virt_diff, init_page_table_region_size, KMemoryRegionType_VirtualDramKernelInitPt));

        /* All linear-mapped DRAM regions that we haven't tagged by this point will be allocated to some pool partition. Tag them. */
        for (auto &region : KMemoryLayout::GetPhysicalMemoryRegionTree()) {
-           if (region.GetType() == KMemoryRegionType_DramLinearMapped) {
+           if (region.GetType() == KMemoryRegionType_Dram && region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
                region.SetType(KMemoryRegionType_DramPoolPartition);
            }
        }