kern: Implement KAutoObject, KSlabHeap, KLightLock

Michael Scire 2020-01-29 22:06:25 -08:00
parent bb4ade30e4
commit 2faf3d33b5
16 changed files with 923 additions and 5 deletions


@@ -43,8 +43,13 @@
 #include "mesosphere/kern_k_memory_manager.hpp"
 #include "mesosphere/kern_k_interrupt_task_manager.hpp"
 #include "mesosphere/kern_k_core_local_region.hpp"
+#include "mesosphere/kern_k_slab_heap.hpp"
+#include "mesosphere/kern_k_light_lock.hpp"
 #include "mesosphere/kern_kernel.hpp"
+/* Auto Objects. */
+#include "mesosphere/kern_k_auto_object.hpp"
 /* Supervisor Calls. */
 #include "mesosphere/kern_svc.hpp"


@@ -0,0 +1,217 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <vapours.hpp>
#include <mesosphere/kern_panic.hpp>
#include <mesosphere/kern_k_typed_address.hpp>
#include <mesosphere/kern_k_class_token.hpp>
namespace ams::kern {
class KProcess;
#define MESOSPHERE_AUTOOBJECT_TRAITS(CLASS) \
private: \
friend class KClassTokenGenerator; \
static constexpr inline auto ObjectType = KClassTokenGenerator::ObjectType::CLASS; \
static constexpr inline const char * const TypeName = #CLASS; \
static constexpr inline ClassTokenType ClassToken = ::ams::kern::ClassToken<CLASS>; \
public: \
static constexpr ALWAYS_INLINE TypeObj GetStaticTypeObj() { return TypeObj(TypeName, ClassToken); } \
static constexpr ALWAYS_INLINE const char *GetStaticTypeName() { return TypeName; } \
virtual TypeObj GetTypeObj() const { return TypeObj(TypeName, ClassToken); } \
virtual const char *GetTypeName() { return TypeName; } \
private:
class KAutoObject {
NON_COPYABLE(KAutoObject);
NON_MOVEABLE(KAutoObject);
protected:
class TypeObj {
private:
const char *name;
ClassTokenType class_token;
public:
constexpr explicit TypeObj(const char *n, ClassTokenType tok) : name(n), class_token(tok) { /* ... */ }
constexpr ALWAYS_INLINE const char *GetName() const { return this->name; }
constexpr ALWAYS_INLINE ClassTokenType GetClassToken() const { return this->class_token; }
constexpr ALWAYS_INLINE bool operator==(const TypeObj &rhs) {
return this->GetClassToken() == rhs.GetClassToken();
}
constexpr ALWAYS_INLINE bool operator!=(const TypeObj &rhs) {
return this->GetClassToken() != rhs.GetClassToken();
}
constexpr ALWAYS_INLINE bool IsDerivedFrom(const TypeObj &rhs) {
return (this->GetClassToken() | rhs.GetClassToken()) == this->GetClassToken();
}
};
private:
std::atomic<u32> ref_count;
public:
static KAutoObject *Create(KAutoObject *ptr);
public:
constexpr ALWAYS_INLINE explicit KAutoObject() : ref_count(0) { /* ... */ }
virtual ~KAutoObject() { /* ... */ }
/* Destroy is responsible for destroying the auto object's resources when ref_count hits zero. */
virtual void Destroy() { /* ... */ }
/* Finalize is responsible for cleaning up resources, but does not destroy the object. */
virtual void Finalize() { /* ... */ }
virtual KProcess *GetOwner() const { return nullptr; }
u32 GetReferenceCount() const {
return this->ref_count;
}
ALWAYS_INLINE bool IsDerivedFrom(const TypeObj &rhs) const {
return this->GetTypeObj().IsDerivedFrom(rhs);
}
ALWAYS_INLINE bool IsDerivedFrom(const KAutoObject &rhs) const {
return this->IsDerivedFrom(rhs.GetTypeObj());
}
template<typename Derived>
ALWAYS_INLINE Derived DynamicCast() {
static_assert(std::is_pointer<Derived>::value);
using DerivedType = typename std::remove_pointer<Derived>::type;
if (AMS_LIKELY(this->IsDerivedFrom(DerivedType::GetStaticTypeObj()))) {
return static_cast<Derived>(this);
} else {
return nullptr;
}
}
template<typename Derived>
ALWAYS_INLINE const Derived DynamicCast() const {
static_assert(std::is_pointer<Derived>::value);
using DerivedType = typename std::remove_pointer<Derived>::type;
if (AMS_LIKELY(this->IsDerivedFrom(DerivedType::GetStaticTypeObj()))) {
return static_cast<Derived>(this);
} else {
return nullptr;
}
}
ALWAYS_INLINE bool Open() {
/* Atomically increment the reference count, only if it's positive. */
u32 cur_ref_count = this->ref_count.load(std::memory_order_acquire);
do {
if (AMS_UNLIKELY(cur_ref_count == 0)) {
return false;
}
MESOSPHERE_ABORT_UNLESS(cur_ref_count < cur_ref_count + 1);
} while (!this->ref_count.compare_exchange_weak(cur_ref_count, cur_ref_count + 1, std::memory_order_relaxed));
return true;
}
ALWAYS_INLINE void Close() {
/* Atomically decrement the reference count, not allowing it to become negative. */
u32 cur_ref_count = this->ref_count.load(std::memory_order_acquire);
do {
MESOSPHERE_ABORT_UNLESS(cur_ref_count > 0);
} while (!this->ref_count.compare_exchange_weak(cur_ref_count, cur_ref_count - 1, std::memory_order_relaxed));
/* If ref count hits zero, destroy the object. */
if (cur_ref_count - 1 == 0) {
this->Destroy();
}
}
/* Ensure that we have functional type object getters. */
MESOSPHERE_AUTOOBJECT_TRAITS(KAutoObject);
};
class KAutoObjectWithListContainer;
class KAutoObjectWithList : public KAutoObject {
private:
friend class KAutoObjectWithListContainer;
private:
util::IntrusiveRedBlackTreeNode list_node;
public:
static ALWAYS_INLINE int Compare(const KAutoObjectWithList &lhs, const KAutoObjectWithList &rhs) {
const u64 lid = lhs.GetId();
const u64 rid = rhs.GetId();
if (lid < rid) {
return -1;
} else if (lid > rid) {
return 1;
} else {
return 0;
}
}
public:
virtual u64 GetId() const {
return reinterpret_cast<u64>(this);
}
};
template<typename T>
class KScopedAutoObject {
static_assert(std::is_base_of<KAutoObject, T>::value);
NON_COPYABLE(KScopedAutoObject);
private:
T *obj;
private:
constexpr ALWAYS_INLINE void Swap(KScopedAutoObject &rhs) {
/* TODO: C++20 constexpr std::swap */
T *tmp = rhs.obj;
rhs.obj = this->obj;
this->obj = tmp;
}
public:
constexpr ALWAYS_INLINE KScopedAutoObject() : obj(nullptr) { /* ... */ }
constexpr ALWAYS_INLINE KScopedAutoObject(T *o) : obj(o) { /* ... */ }
ALWAYS_INLINE ~KScopedAutoObject() {
if (this->obj != nullptr) {
this->obj->Close();
}
this->obj = nullptr;
}
constexpr ALWAYS_INLINE KScopedAutoObject(KScopedAutoObject &&rhs) {
this->obj = rhs.obj;
rhs.obj = nullptr;
}
constexpr ALWAYS_INLINE KScopedAutoObject &operator=(KScopedAutoObject &&rhs) {
rhs.Swap(*this);
return *this;
}
constexpr ALWAYS_INLINE T *operator->() { return this->obj; }
constexpr ALWAYS_INLINE T &operator*() { return *this->obj; }
constexpr ALWAYS_INLINE void Reset(T *o) {
KScopedAutoObject(o).Swap(*this);
}
};
}
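
For orientation, a minimal usage sketch of the reference-counting and token-cast API above. This is not part of the commit: KSomeObject stands in for any final KAutoObject-derived class declared with MESOSPHERE_AUTOOBJECT_TRAITS, and the function is hypothetical.

/* Hypothetical caller; KSomeObject is an assumed final KAutoObject subclass. */
void ExampleUseObject(KAutoObject *obj) {
    /* Open() takes a reference only while the count is still positive. */
    if (!obj->Open()) {
        return;
    }
    {
        /* The scoped holder owns that reference; its destructor calls Close(), */
        /* and the Close() that drops the count to zero invokes Destroy().      */
        KScopedAutoObject<KAutoObject> holder(obj);
        /* DynamicCast() succeeds only when the target type's class token bits  */
        /* are a subset of this object's token.                                 */
        if (KSomeObject *derived = obj->DynamicCast<KSomeObject *>()) {
            /* ... use derived ... */
        }
    }
}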


@@ -0,0 +1,65 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <vapours.hpp>
#include <mesosphere/kern_k_auto_object.hpp>
#include <mesosphere/kern_k_light_lock.hpp>
namespace ams::kern {
class KAutoObjectWithListContainer {
NON_COPYABLE(KAutoObjectWithListContainer);
NON_MOVEABLE(KAutoObjectWithListContainer);
private:
using ListType = util::IntrusiveRedBlackTreeMemberTraits<&KAutoObjectWithList::list_node>::TreeType<KAutoObjectWithList>;
public:
class ListAccessor : public KScopedLightLock {
private:
ListType &list;
public:
explicit ListAccessor(KAutoObjectWithListContainer *container) : KScopedLightLock(container->lock), list(container->object_list) { /* ... */ }
explicit ListAccessor(KAutoObjectWithListContainer &container) : KScopedLightLock(container.lock), list(container.object_list) { /* ... */ }
typename ListType::iterator begin() const {
return this->list.begin();
}
typename ListType::iterator end() const {
return this->list.end();
}
typename ListType::iterator find(typename ListType::const_reference ref) const {
return this->list.find(ref);
}
};
friend class ListAccessor;
private:
KLightLock lock;
ListType object_list;
public:
constexpr KAutoObjectWithListContainer() : lock(), object_list() { /* ... */ }
void Initialize() { /* Nothing to do. */ }
void Finalize() { /* Nothing to do. */ }
Result Register(KAutoObjectWithList *obj);
Result Unregister(KAutoObjectWithList *obj);
size_t GetOwnedCount(KProcess *owner);
};
}


@@ -0,0 +1,127 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <vapours.hpp>
#include <mesosphere/kern_panic.hpp>
#include <mesosphere/kern_k_typed_address.hpp>
namespace ams::kern {
class KAutoObject;
class KClassTokenGenerator {
public:
using TokenBaseType = u16;
public:
static constexpr size_t BaseClassBits = 8;
static constexpr size_t FinalClassBits = (sizeof(TokenBaseType) * CHAR_BIT) - BaseClassBits;
/* One bit per base class. */
static constexpr size_t NumBaseClasses = BaseClassBits;
/* Final classes are combinations of three bits. */
static constexpr size_t NumFinalClasses = [] {
TokenBaseType index = 0;
for (size_t i = 0; i < FinalClassBits; i++) {
for (size_t j = i + 1; j < FinalClassBits; j++) {
for (size_t k = j + 1; k < FinalClassBits; k++) {
index++;
}
}
}
return index;
}();
private:
template<TokenBaseType Index>
static constexpr inline TokenBaseType BaseClassToken = BIT(Index);
template<TokenBaseType Index>
static constexpr inline TokenBaseType FinalClassToken = [] {
TokenBaseType index = 0;
for (size_t i = 0; i < FinalClassBits; i++) {
for (size_t j = i + 1; j < FinalClassBits; j++) {
for (size_t k = j + 1; k < FinalClassBits; k++) {
if ((index++) == Index) {
return ((1ul << i) | (1ul << j) | (1ul << k)) << BaseClassBits;
}
}
}
}
__builtin_unreachable();
}();
template<typename T>
static constexpr inline TokenBaseType GetClassToken() {
static_assert(std::is_base_of<KAutoObject, T>::value);
if constexpr (std::is_same<T, KAutoObject>::value) {
static_assert(T::ObjectType == ObjectType::BaseClassesStart);
return BaseClassToken<0>;
} else if constexpr (!std::is_final<T>::value) {
static_assert(ObjectType::BaseClassesStart < T::ObjectType && T::ObjectType < ObjectType::BaseClassesEnd);
constexpr auto ClassIndex = static_cast<TokenBaseType>(T::ObjectType) - static_cast<TokenBaseType>(ObjectType::BaseClassesStart);
return BaseClassToken<ClassIndex> | GetClassToken<typename T::BaseClass>();
} else if constexpr (ObjectType::FinalClassesStart <= T::ObjectType && T::ObjectType < ObjectType::FinalClassesEnd) {
constexpr auto ClassIndex = static_cast<TokenBaseType>(T::ObjectType) - static_cast<TokenBaseType>(ObjectType::FinalClassesStart);
return FinalClassToken<ClassIndex> | GetClassToken<typename T::BaseClass>();
} else {
static_assert(!std::is_same<T, T>::value, "GetClassToken: Invalid Type");
}
};
public:
enum class ObjectType {
BaseClassesStart = 0,
KAutoObject = BaseClassesStart,
KSynchronizationObject,
KReadableEvent,
BaseClassesEnd,
FinalClassesStart = BaseClassesEnd,
KInterruptEvent = FinalClassesStart,
KDebug,
KThread,
KServerPort,
KServerSession,
KClientPort,
KClientSession,
KProcess,
KResourceLimit,
KLightSession,
KPort,
KSession,
KSharedMemory,
KEvent,
KWritableEvent,
KLightClientSession,
KLightServerSession,
KTransferMemory,
KDeviceAddressSpace,
KSessionRequest,
KCodeMemory,
FinalClassesEnd = FinalClassesStart + NumFinalClasses,
};
template<typename T>
static constexpr inline TokenBaseType ClassToken = GetClassToken<T>();
};
using ClassTokenType = KClassTokenGenerator::TokenBaseType;
template<typename T>
static constexpr inline ClassTokenType ClassToken = KClassTokenGenerator::ClassToken<T>;
}
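
The token layout is what makes TypeObj::IsDerivedFrom a single mask comparison: every base class owns one bit in the low byte, every final class owns a unique three-bit combination in the high byte, and a class's token ORs its own bits with those of its whole base chain, so "A derives from B" reduces to "B's bits are a subset of A's bits". A self-contained illustration follows (not committed code; the final-class value and the assumed base chain are picked for the example):

/* Standalone model of the subset check behind TypeObj::IsDerivedFrom(). */
#include <cassert>
#include <cstdint>

int main() {
    using Token = std::uint16_t;
    /* Base classes: one bit each in the low 8 bits, accumulated along the inheritance chain. */
    constexpr Token KAutoObjectTok            = 1u << 0;
    constexpr Token KSynchronizationObjectTok = (1u << 1) | KAutoObjectTok;
    constexpr Token KReadableEventTok         = (1u << 2) | KSynchronizationObjectTok;
    /* A final class: a unique 3-of-8 combination in the high byte (here FinalClassToken<0>, */
    /* bits 8..10), ORed with an assumed KReadableEvent base chain.                          */
    constexpr Token SomeFinalClassTok = ((1u << 8) | (1u << 9) | (1u << 10)) | KReadableEventTok;

    auto is_derived_from = [](Token cls, Token base) {
        return (cls | base) == cls; /* base's bits must be a subset of cls's bits */
    };

    assert(is_derived_from(SomeFinalClassTok, KAutoObjectTok));
    assert(is_derived_from(KReadableEventTok, KSynchronizationObjectTok));
    assert(!is_derived_from(KSynchronizationObjectTok, KReadableEventTok));
    return 0;
}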


@@ -0,0 +1,73 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <vapours.hpp>
#include <mesosphere/kern_select_cpu.hpp>
#include <mesosphere/kern_k_current_context.hpp>
namespace ams::kern {
class KLightLock {
private:
std::atomic<uintptr_t> tag;
public:
constexpr KLightLock() : tag(0) { /* ... */ }
void Lock() {
const uintptr_t cur_thread = reinterpret_cast<uintptr_t>(GetCurrentThreadPointer());
while (true) {
uintptr_t old_tag = this->tag.load(std::memory_order_relaxed);
while (!this->tag.compare_exchange_weak(old_tag, (old_tag == 0) ? cur_thread : old_tag | 1, std::memory_order_acquire)) {
/* ... */
}
if ((old_tag == 0) || ((old_tag | 1) == (cur_thread | 1))) {
break;
}
this->LockSlowPath(old_tag | 1, cur_thread);
}
}
void Unlock() {
const uintptr_t cur_thread = reinterpret_cast<uintptr_t>(GetCurrentThreadPointer());
uintptr_t expected = cur_thread;
if (!this->tag.compare_exchange_weak(expected, 0, std::memory_order_release)) {
this->UnlockSlowPath(cur_thread);
}
}
void LockSlowPath(uintptr_t owner, uintptr_t cur_thread);
void UnlockSlowPath(uintptr_t cur_thread);
};
class KScopedLightLock {
private:
KLightLock *lock;
public:
explicit ALWAYS_INLINE KScopedLightLock(KLightLock *l) : lock(l) {
this->lock->Lock();
}
ALWAYS_INLINE ~KScopedLightLock() {
this->lock->Unlock();
}
explicit ALWAYS_INLINE KScopedLightLock(KLightLock &l) : KScopedLightLock(std::addressof(l)) { /* ... */ }
};
}
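
The whole lock state lives in the single tag word: 0 means unlocked, the owning thread's pointer means locked with no waiters, and a set low bit marks contention, which is why Unlock's compare-exchange against the bare thread pointer fails and falls through to UnlockSlowPath. A short usage sketch (not in the commit; the lock object and function are hypothetical):

namespace ams::kern {
    namespace {
        KLightLock g_example_lock; /* hypothetical lock guarding some shared state */
    }
    void ExampleCriticalSection() {
        /* The scoped holder calls Lock() in its constructor and Unlock() at scope exit. */
        KScopedLightLock lk(g_example_lock);
        /* ... access state guarded by g_example_lock ... */
    }
}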


@@ -0,0 +1,183 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <vapours.hpp>
#include <mesosphere/kern_panic.hpp>
#include <mesosphere/kern_k_typed_address.hpp>
namespace ams::kern {
namespace impl {
class KSlabHeapImpl {
NON_COPYABLE(KSlabHeapImpl);
NON_MOVEABLE(KSlabHeapImpl);
public:
struct Node {
Node *next;
};
private:
std::atomic<Node *> head;
size_t obj_size;
public:
constexpr KSlabHeapImpl() : head(nullptr), obj_size(0) { /* ... */ }
void Initialize(size_t size) {
MESOSPHERE_INIT_ABORT_UNLESS(this->head == nullptr);
this->obj_size = size;
}
Node *GetHead() const {
return this->head;
}
size_t GetObjectSize() const {
return this->obj_size;
}
void *Allocate() {
Node *ret = this->head.load();
do {
if (AMS_UNLIKELY(ret == nullptr)) {
break;
}
} while (!this->head.compare_exchange_weak(ret, ret->next));
return ret;
}
void Free(void *obj) {
Node *node = reinterpret_cast<Node *>(obj);
Node *cur_head = this->head.load();
do {
node->next = cur_head;
} while (!this->head.compare_exchange_weak(cur_head, node));
}
};
}
class KSlabHeapBase {
NON_COPYABLE(KSlabHeapBase);
NON_MOVEABLE(KSlabHeapBase);
private:
using Impl = impl::KSlabHeapImpl;
private:
Impl impl;
uintptr_t peak;
uintptr_t start;
uintptr_t end;
private:
ALWAYS_INLINE Impl *GetImpl() {
return std::addressof(this->impl);
}
ALWAYS_INLINE const Impl *GetImpl() const {
return std::addressof(this->impl);
}
public:
constexpr KSlabHeapBase() : impl(), peak(0), start(0), end(0) { /* ... */ }
ALWAYS_INLINE bool Contains(uintptr_t address) const {
return this->start <= address && address < this->end;
}
void InitializeImpl(size_t obj_size, void *memory, size_t memory_size) {
/* Ensure we don't initialize a slab using null memory. */
MESOSPHERE_ABORT_UNLESS(memory != nullptr);
/* Initialize the base allocator. */
this->GetImpl()->Initialize(obj_size);
/* Set our tracking variables. */
const size_t num_obj = (memory_size / obj_size);
this->start = reinterpret_cast<uintptr_t>(memory);
this->end = this->start + num_obj * obj_size;
this->peak = this->start;
/* Free the objects. */
u8 *cur = reinterpret_cast<u8 *>(this->end);
for (size_t i = 0; i < num_obj; i++) {
cur -= obj_size;
this->GetImpl()->Free(cur);
}
}
size_t GetSlabHeapSize() const {
return (this->end - this->start) / this->GetObjectSize();
}
size_t GetObjectSize() const {
return this->GetImpl()->GetObjectSize();
}
void *AllocateImpl() {
void *obj = this->GetImpl()->Allocate();
/* TODO: under some debug define, track the peak for statistics, as N does? */
return obj;
}
void FreeImpl(void *obj) {
/* Don't allow freeing an object that wasn't allocated from this heap. */
MESOSPHERE_ABORT_UNLESS(this->Contains(reinterpret_cast<uintptr_t>(obj)));
this->GetImpl()->Free(obj);
}
size_t GetObjectIndexImpl(const void *obj) const {
return (reinterpret_cast<uintptr_t>(obj) - this->start) / this->GetObjectSize();
}
size_t GetPeakIndex() const {
return this->GetObjectIndexImpl(reinterpret_cast<const void *>(this->peak));
}
uintptr_t GetSlabHeapAddress() const {
return this->start;
}
};
template<typename T>
class KSlabHeap : public KSlabHeapBase {
public:
constexpr KSlabHeap() : KSlabHeapBase() { /* ... */ }
void Initialize(void *memory, size_t memory_size) {
this->InitializeImpl(sizeof(T), memory, memory_size);
}
T *Allocate() {
T *obj = reinterpret_cast<T *>(this->AllocateImpl());
if (AMS_LIKELY(obj != nullptr)) {
new (obj) T();
}
return obj;
}
void Free(T *obj) {
this->FreeImpl(obj);
}
size_t GetObjectIndex(const T *obj) const {
return this->GetObjectIndexImpl(obj);
}
};
}
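
A usage sketch for the typed slab heap (not part of the commit; the object type, buffer, and count are made up): Initialize carves a caller-provided buffer into fixed-size entries and pushes each onto the intrusive free list, Allocate pops an entry and placement-constructs a T in it, and Free returns the storage to the list.

namespace ams::kern {
    struct ExampleObject { u64 value; }; /* hypothetical slab-allocated type */
    namespace {
        KSlabHeap<ExampleObject> g_example_slab;
        alignas(ExampleObject) u8 g_example_slab_memory[sizeof(ExampleObject) * 64];
    }
    void ExampleSlabUsage() {
        /* Carve the buffer into 64 free-list entries. */
        g_example_slab.Initialize(g_example_slab_memory, sizeof(g_example_slab_memory));
        /* Pop an entry and default-construct an ExampleObject in it. */
        ExampleObject *obj = g_example_slab.Allocate();
        if (obj != nullptr) {
            obj->value = 42;
            /* Push the storage back onto the lock-free free list. */
            g_example_slab.Free(obj);
        }
    }
}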


@@ -14,11 +14,24 @@
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
 #pragma once
+#include <mesosphere/kern_slab_helpers.hpp>
 namespace ams::kern {
-class KThread {
+class KThread : public KAutoObjectWithSlabHeapAndContainer<KThread, /* TODO: KSynchronizationObject */ KAutoObjectWithList> {
+public:
+struct StackParameters {
+alignas(0x10) u8 svc_permission[0x10];
+std::atomic<u8> dpc_flags;
+u8 current_svc_id;
+bool is_calling_svc;
+bool is_in_exception_handler;
+bool has_exception_svc_perms;
+s32 disable_count;
+void *context; /* TODO: KThreadContext * */
+};
+static_assert(alignof(StackParameters) == 0x10);
 /* TODO: This should be a KAutoObject, and this is a placeholder definition. */
 };


@@ -31,7 +31,8 @@ namespace ams::kern {
 private:
 static inline State s_state = State::Invalid;
 public:
-static void Initialize(s32 core_id);
+static NOINLINE void Initialize(s32 core_id);
+static NOINLINE void InitializeCoreThreads(s32 core_id);
 static ALWAYS_INLINE State GetState() { return s_state; }
 static ALWAYS_INLINE void SetState(State state) { s_state = state; }


@@ -0,0 +1,116 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <vapours.hpp>
#include <mesosphere/kern_panic.hpp>
#include <mesosphere/kern_k_auto_object.hpp>
#include <mesosphere/kern_k_slab_heap.hpp>
#include <mesosphere/kern_k_auto_object_container.hpp>
namespace ams::kern {
template<class Derived>
class KSlabAllocated {
private:
static inline KSlabHeap<Derived> s_slab_heap;
public:
constexpr KSlabAllocated() { /* ... */ }
size_t GetSlabIndex() const {
return s_slab_heap.GetObjectIndex(static_cast<const Derived *>(this));
}
public:
static void InitializeSlabHeap(void *memory, size_t memory_size) {
s_slab_heap.Initialize(memory, memory_size);
}
static ALWAYS_INLINE Derived *Allocate() {
return s_slab_heap.Allocate();
}
static ALWAYS_INLINE void Free(Derived *obj) {
s_slab_heap.Free(obj);
}
static size_t GetObjectSize() { return s_slab_heap.GetObjectSize(); }
static size_t GetSlabHeapSize() { return s_slab_heap.GetSlabHeapSize(); }
static size_t GetPeakIndex() { return s_slab_heap.GetPeakIndex(); }
static uintptr_t GetSlabHeapAddress() { return s_slab_heap.GetSlabHeapAddress(); }
};
template<typename Derived, typename Base>
class KAutoObjectWithSlabHeapAndContainer : public Base {
static_assert(std::is_base_of<KAutoObjectWithList, Base>::value);
private:
static inline KSlabHeap<Derived> s_slab_heap;
static inline KAutoObjectWithListContainer s_container;
private:
static ALWAYS_INLINE Derived *Allocate() {
return s_slab_heap.Allocate();
}
static ALWAYS_INLINE void Free(Derived *obj) {
s_slab_heap.Free(obj);
}
public:
constexpr KAutoObjectWithSlabHeapAndContainer() : Base() { /* ... */ }
virtual ~KAutoObjectWithSlabHeapAndContainer() { /* ... */ }
virtual void Destroy() override {
const bool is_initialized = this->IsInitialized();
uintptr_t arg = 0;
if (is_initialized) {
s_container.Unregister(this);
arg = this->GetPostDestroyArgument();
this->Finalize();
}
Free(static_cast<Derived *>(this));
if (is_initialized) {
Derived::PostDestroy(arg);
}
}
virtual bool IsInitialized() const { return true; }
virtual uintptr_t GetPostDestroyArgument() const { return 0; }
size_t GetSlabIndex() const {
return s_slab_heap.GetObjectIndex(static_cast<const Derived *>(this));
}
public:
static void InitializeSlabHeap(void *memory, size_t memory_size) {
s_slab_heap.Initialize(memory, memory_size);
s_container.Initialize();
}
static Derived *Create() {
Derived *obj = Allocate();
if (AMS_LIKELY(obj != nullptr)) {
KAutoObject::Create(obj);
}
return obj;
}
static Result Register(Derived *obj) {
return s_container.Register(obj);
}
static size_t GetObjectSize() { return s_slab_heap.GetObjectSize(); }
static size_t GetSlabHeapSize() { return s_slab_heap.GetSlabHeapSize(); }
static size_t GetPeakIndex() { return s_slab_heap.GetPeakIndex(); }
static uintptr_t GetSlabHeapAddress() { return s_slab_heap.GetSlabHeapAddress(); }
};
}
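
To show how the pieces compose, here is a hypothetical final object type (not introduced by this commit, and its ObjectType enum entry does not exist yet) that gets slab allocation, a global container, and auto-object reference counting from the helper:

namespace ams::kern {
    /* Hypothetical; a real class would also need a KClassTokenGenerator::ObjectType entry. */
    class KExampleObject final : public KAutoObjectWithSlabHeapAndContainer<KExampleObject, KAutoObjectWithList> {
        MESOSPHERE_AUTOOBJECT_TRAITS(KExampleObject);
        public:
            /* Called by Destroy() after the slab storage has already been freed. */
            static void PostDestroy(uintptr_t arg) { /* ... */ }
    };
    void ExampleLifetime() {
        /* Create() pops a slab entry and KAutoObject::Create() sets its reference count to 1. */
        KExampleObject *obj = KExampleObject::Create();
        if (obj != nullptr) {
            /* Track the object in the global container's red-black tree. */
            if (R_SUCCEEDED(KExampleObject::Register(obj))) {
                /* ... hand out further references via Open()/Close() ... */
            }
            /* Dropping the last reference runs Destroy(): Unregister, Finalize, Free, PostDestroy. */
            obj->Close();
        }
    }
}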


@@ -0,0 +1,25 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <mesosphere.hpp>
namespace ams::kern {
KAutoObject *KAutoObject::Create(KAutoObject *obj) {
obj->ref_count = 1;
return obj;
}
}


@@ -0,0 +1,51 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <mesosphere.hpp>
namespace ams::kern {
Result KAutoObjectWithListContainer::Register(KAutoObjectWithList *obj) {
KScopedLightLock lk(this->lock);
this->object_list.insert(*obj);
return ResultSuccess();
}
Result KAutoObjectWithListContainer::Unregister(KAutoObjectWithList *obj) {
KScopedLightLock lk(this->lock);
this->object_list.erase(this->object_list.iterator_to(*obj));
return ResultSuccess();
}
size_t KAutoObjectWithListContainer::GetOwnedCount(KProcess *owner) {
KScopedLightLock lk(this->lock);
size_t count = 0;
for (auto &obj : this->object_list) {
if (obj.GetOwner() == owner) {
count++;
}
}
return count;
}
}


@@ -0,0 +1,28 @@
/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <mesosphere.hpp>
namespace ams::kern {
void KLightLock::LockSlowPath(uintptr_t owner, uintptr_t cur_thread) {
/* TODO: Implement (requires KThread, KScheduler) */
}
void KLightLock::UnlockSlowPath(uintptr_t cur_thread) {
/* TODO: Implement (requires KThread, KScheduler) */
}
}


@@ -17,7 +17,7 @@
 namespace ams::kern {
-NOINLINE void Kernel::Initialize(s32 core_id) {
+void Kernel::Initialize(s32 core_id) {
 /* Construct the core local region object in place. */
 KCoreLocalContext *clc = GetPointer<KCoreLocalContext>(KMemoryLayout::GetCoreLocalRegionAddress());
 new (clc) KCoreLocalContext;
@@ -46,4 +46,9 @@ namespace ams::kern {
 }
 }
+void Kernel::InitializeCoreThreads(s32 core_id) {
+/* TODO: This function wants to setup the main thread and the idle thread. */
+/* It also wants to initialize the scheduler/interrupt manager/hardware timer. */
+}
 }


@@ -25,6 +25,15 @@ namespace ams::kern {
 /* Ensure that all cores get to this point before proceeding. */
 cpu::SynchronizeAllCores();
+/* Initialize the main and idle thread for each core. */
+/* Synchronize after each init to ensure the cores go in order. */
+for (size_t i = 0; i < cpu::NumCores; i++) {
+if (static_cast<s32>(i) == core_id) {
+Kernel::InitializeCoreThreads(core_id);
+}
+cpu::SynchronizeAllCores();
+}
 /* TODO: Implement more of Main() */
 while (true) { /* ... */ }
 }


@@ -274,7 +274,7 @@ namespace ams::util {
 }
 private:
 static constexpr TYPED_STORAGE(Derived) DerivedStorage = {};
-static_assert(std::addressof(GetParent(GetNode(GetPointer(DerivedStorage)))) == GetPointer(DerivedStorage));
+static_assert(GetParent(GetNode(GetPointer(DerivedStorage))) == GetPointer(DerivedStorage));
 };
 template<class Derived>


@@ -303,7 +303,7 @@ namespace ams::kern::init {
 init_args->cpuactlr = cpu::GetCpuActlrEl1();
 init_args->cpuectlr = cpu::GetCpuEctlrEl1();
 init_args->sctlr = cpu::GetSctlrEl1();
-init_args->sp = GetInteger(KMemoryLayout::GetMainStackTopAddress(core_id));
+init_args->sp = GetInteger(KMemoryLayout::GetMainStackTopAddress(core_id)) - sizeof(KThread::StackParameters);
 init_args->entrypoint = reinterpret_cast<uintptr_t>(::ams::kern::HorizonKernelMain);
 init_args->argument = static_cast<u64>(core_id);
 init_args->setup_function = reinterpret_cast<uintptr_t>(::ams::kern::init::StartOtherCore);