mirror of
https://github.com/Atmosphere-NX/Atmosphere
synced 2024-11-09 22:56:35 +00:00
mem: implement most of StandardAllocator (#860)
This was tested using `https://github.com/node-dot-cpp/alloc-test` plus a few other hand-written tests. It seems to work for the case we care about (sysmodules without thread caching). External users are advised to build with assertions enabled and to contact SciresM if they find issues. This is a lot of code to have gotten right in one go, and it was written mostly after midnight while sick, so there are probably unnoticed issues.
This commit is contained in:
parent 7502e2174f
commit 87ec045a98
47 changed files with 5473 additions and 43 deletions
@@ -15,7 +15,7 @@ include $(DEVKITPRO)/libnx/switch_rules
 #---------------------------------------------------------------------------------
 # options for code generation
 #---------------------------------------------------------------------------------
-export DEFINES = $(ATMOSPHERE_DEFINES) -DATMOSPHERE_IS_STRATOSPHERE
+export DEFINES = $(ATMOSPHERE_DEFINES) -DATMOSPHERE_IS_STRATOSPHERE -D_GNU_SOURCE
 export SETTINGS = $(ATMOSPHERE_SETTINGS) -O2
 export CFLAGS = $(ATMOSPHERE_CFLAGS) $(SETTINGS) $(DEFINES) $(INCLUDE)
 export CXXFLAGS = $(CFLAGS) $(ATMOSPHERE_CXXFLAGS)
@@ -15,7 +15,7 @@ include $(DEVKITPRO)/libnx/switch_rules
 #---------------------------------------------------------------------------------
 # options for code generation
 #---------------------------------------------------------------------------------
-DEFINES := $(ATMOSPHERE_DEFINES) -DATMOSPHERE_IS_STRATOSPHERE
+DEFINES := $(ATMOSPHERE_DEFINES) -DATMOSPHERE_IS_STRATOSPHERE -D_GNU_SOURCE
 SETTINGS := $(ATMOSPHERE_SETTINGS) -O2
 CFLAGS := $(ATMOSPHERE_CFLAGS) $(SETTINGS) $(DEFINES) $(INCLUDE)
 CXXFLAGS := $(CFLAGS) $(ATMOSPHERE_CXXFLAGS) -flto
@@ -22,11 +22,15 @@
/* Libstratosphere-only utility. */
#include "stratosphere/util.hpp"

/* Sadly required shims. */
#include "stratosphere/svc/svc_stratosphere_shims.hpp"

/* Critical modules with no dependencies. */
#include "stratosphere/ams.hpp"
#include "stratosphere/os.hpp"
#include "stratosphere/dd.hpp"
#include "stratosphere/lmem.hpp"
#include "stratosphere/mem.hpp"

/* Pull in all ID definitions from NCM. */
#include "stratosphere/ncm/ncm_ids.hpp"

@@ -54,7 +58,8 @@
#include "stratosphere/spl.hpp"
#include "stratosphere/updater.hpp"


/* Include FS last. */
#include "stratosphere/fs.hpp"
#include "stratosphere/fssrv.hpp"
#include "stratosphere/fssystem.hpp"
libraries/libstratosphere/include/stratosphere/mem.hpp (new file, 20 lines)
@@ -0,0 +1,20 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once

#include <stratosphere/mem/mem_standard_allocator.hpp>
#include <stratosphere/mem/impl/mem_impl_heap.hpp>
@@ -0,0 +1,63 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <vapours.hpp>
#include <stratosphere/mem/impl/mem_impl_common.hpp>

namespace ams::mem::impl::heap {

    class TlsHeapCache;

    class CachedHeap final {
        NON_COPYABLE(CachedHeap);
        private:
            TlsHeapCache *tls_heap_cache;
        public:
            constexpr CachedHeap() : tls_heap_cache() { /* ... */ }
            ~CachedHeap() { this->Finalize(); }

            ALWAYS_INLINE CachedHeap(CachedHeap &&rhs) : tls_heap_cache(rhs.tls_heap_cache) {
                rhs.tls_heap_cache = nullptr;
            }
            ALWAYS_INLINE CachedHeap &operator=(CachedHeap &&rhs) {
                this->Reset();
                this->tls_heap_cache = rhs.tls_heap_cache;
                rhs.tls_heap_cache = nullptr;
                return *this;
            }

            void *Allocate(size_t n);
            void *Allocate(size_t n, size_t align);
            size_t GetAllocationSize(const void *ptr);
            errno_t Free(void *p);
            errno_t FreeWithSize(void *p, size_t size);
            errno_t Reallocate(void *ptr, size_t size, void **p);
            errno_t Shrink(void *ptr, size_t size);

            void ReleaseAllCache();
            void Finalize();
            bool CheckCache();
            errno_t QueryV(int query, std::va_list vl);
            errno_t Query(int query, ...);

            void Reset() { this->Finalize(); }
            void Reset(TlsHeapCache *thc);
            TlsHeapCache *Release();

            constexpr explicit ALWAYS_INLINE operator bool() const { return this->tls_heap_cache != nullptr; }
    };

}
@@ -0,0 +1,67 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <vapours.hpp>
#include <stratosphere/mem/impl/mem_impl_common.hpp>
#include <stratosphere/mem/impl/mem_impl_declarations.hpp>

namespace ams::mem::impl::heap {

    class CachedHeap;
    class TlsHeapCentral;

    using HeapWalkCallback = int (*)(void *ptr, size_t size, void *user_data);

    class CentralHeap final {
        NON_COPYABLE(CentralHeap);
        NON_MOVEABLE(CentralHeap);
        public:
            static constexpr size_t PageSize = 4_KB;
            static constexpr size_t MinimumAlignment = alignof(u64);
            using DestructorHandler = void (*)(void *start, void *end);
        private:
            TlsHeapCentral *tls_heap_central;
            bool use_virtual_memory;
            u32 option;
            u8 *start;
            u8 *end;
        public:
            constexpr CentralHeap() : tls_heap_central(), use_virtual_memory(), option(), start(), end() { /* ... */ }
            ~CentralHeap() { this->Finalize(); }

            errno_t Initialize(void *start, size_t size, u32 option);
            void Finalize();

            ALWAYS_INLINE void *Allocate(size_t n) { return this->Allocate(n, MinimumAlignment); }
            void *Allocate(size_t n, size_t align);
            size_t GetAllocationSize(const void *ptr);
            errno_t Free(void *p);
            errno_t FreeWithSize(void *p, size_t size);
            errno_t Reallocate(void *ptr, size_t size, void **p);
            errno_t Shrink(void *ptr, size_t size);

            bool MakeCache(CachedHeap *cached_heap);
            errno_t WalkAllocatedPointers(HeapWalkCallback callback, void *user_data);
            errno_t QueryV(int query, std::va_list vl);
            errno_t Query(int query, ...);
        private:
            errno_t QueryVImpl(int query, std::va_list *vl_ptr);
    };

    static_assert(sizeof(CentralHeap) <= sizeof(::ams::mem::impl::InternalCentralHeapStorage));
    static_assert(alignof(CentralHeap) <= alignof(void *));

}
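A minimal usage sketch (not part of the commit) based on the CachedHeap and CentralHeap declarations above. The backing buffer, its size, the option value 0, and the assumption that an errno_t of 0 means success are illustrative; the page-aligned size follows the validation in CentralHeap::Initialize shown later in this diff.

#include <stratosphere.hpp>

namespace {

    alignas(0x1000) u8 g_heap_memory[0x80000]; /* hypothetical page-aligned backing buffer */

}

void HeapExample() {
    ams::mem::impl::heap::CentralHeap central;
    if (central.Initialize(g_heap_memory, sizeof(g_heap_memory), 0) != 0) { /* assumed: 0 == success */
        return;
    }

    /* Allocate directly from the central heap. */
    void *p = central.Allocate(0x100, alignof(u64));
    if (p != nullptr) {
        central.Free(p);
    }

    /* Optionally front the central heap with a per-thread cache. */
    ams::mem::impl::heap::CachedHeap cache;
    if (central.MakeCache(std::addressof(cache))) {
        void *q = cache.Allocate(0x40);
        if (q != nullptr) {
            cache.Free(q);
        }
        cache.Finalize();
    }

    central.Finalize();
}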
@@ -0,0 +1,70 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once
#include <vapours.hpp>
#include <errno.h>

namespace ams::mem::impl {

    constexpr inline size_t MaxSize = static_cast<size_t>(std::numeric_limits<s64>::max());

    using errno_t = int;

    enum DumpMode {
        DumpMode_Basic    = (0 << 0),
        DumpMode_Spans    = (1 << 0),
        DumpMode_Pointers = (1 << 1),
        DumpMode_Pages    = (1 << 2),
        DumpMode_All      = (DumpMode_Pages | DumpMode_Pointers | DumpMode_Spans | DumpMode_Basic),
    };

    enum AllocQuery {
        AllocQuery_Dump                     = 0,
        AllocQuery_PageSize                 = 1,
        AllocQuery_AllocatedSize            = 2,
        AllocQuery_FreeSize                 = 3,
        AllocQuery_SystemSize               = 4,
        AllocQuery_MaxAllocatableSize       = 5,
        AllocQuery_IsClean                  = 6,
        AllocQuery_HeapHash                 = 7,
        AllocQuery_UnifyFreeList            = 8,
        AllocQuery_SetColor                 = 9,
        AllocQuery_GetColor                 = 10,
        AllocQuery_SetName                  = 11,
        AllocQuery_GetName                  = 12,
        /* AllocQuery_Thirteen = 13, */
        AllocQuery_CheckCache               = 14,
        AllocQuery_ClearCache               = 15,
        AllocQuery_FinalizeCache            = 16,
        AllocQuery_FreeSizeMapped           = 17,
        AllocQuery_MaxAllocatableSizeMapped = 18,
        AllocQuery_DumpJson                 = 19,
    };

    enum HeapOption {
        HeapOption_UseEnvironment = (1 << 0),
        HeapOption_DisableCache   = (1 << 2),
    };

    struct HeapHash {
        size_t alloc_count;
        size_t alloc_size;
        size_t hash;
    };
    static_assert(std::is_pod<HeapHash>::value);

}
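A hypothetical sketch (not from the commit) of the query protocol defined by the AllocQuery values above: each query selects the meaning of the trailing varargs. That AllocQuery_FreeSize takes a size_t out-parameter is an assumption, modeled on the bool out-parameter handled for AllocQuery_CheckCache in the CachedHeap implementation later in this commit.

#include <stratosphere.hpp>

size_t QueryFreeSize(ams::mem::impl::heap::CentralHeap &heap) {
    size_t free_size = 0;
    /* Forward the out-parameter through the varargs query interface. */
    const ams::mem::impl::errno_t err = heap.Query(ams::mem::impl::AllocQuery_FreeSize, std::addressof(free_size));
    return (err == 0) ? free_size : 0; /* assumed: 0 indicates success */
}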
@@ -0,0 +1,29 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once

namespace ams::mem::impl {

    namespace heap {

        class CentralHeap;

    }

    using InternalCentralHeapStorage = ::ams::util::TypedStorage<::ams::mem::impl::heap::CentralHeap, sizeof(void *) * 6, alignof(void *)>;

}
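The TypedStorage alias above reserves raw, aligned storage for a type that is only forward-declared here, which is why the CentralHeap header earlier static_asserts that the class fits within InternalCentralHeapStorage. A small self-contained illustration of that pattern (not from the commit; Widget is a hypothetical stand-in for the implementation type):

#include <new>

/* Raw bytes sized and aligned for "six pointers", mirroring the alias above. */
struct OpaqueStorage {
    alignas(alignof(void *)) unsigned char bytes[sizeof(void *) * 6];
};

struct Widget { void *a; void *b; }; /* hypothetical implementation type */
static_assert(sizeof(Widget) <= sizeof(OpaqueStorage));
static_assert(alignof(Widget) <= alignof(void *));

OpaqueStorage g_storage;
Widget *g_widget = new (g_storage.bytes) Widget{}; /* placement-construct into the raw storage */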
@@ -0,0 +1,21 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once

#include <stratosphere/mem/impl/mem_impl_common.hpp>
#include <stratosphere/mem/impl/heap/mem_impl_heap_cached_heap.hpp>
#include <stratosphere/mem/impl/heap/mem_impl_heap_central_heap.hpp>
@@ -0,0 +1,73 @@
/*
 * Copyright (c) 2019-2020 Adubbz, Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <stratosphere/os.hpp>
#include <stratosphere/mem/impl/mem_impl_declarations.hpp>

namespace ams::mem {

    class StandardAllocator {
        NON_COPYABLE(StandardAllocator);
        NON_MOVEABLE(StandardAllocator);
        public:
            using WalkCallback = int (*)(void *ptr, size_t size, void *user_data);

            struct AllocatorHash {
                size_t allocated_count;
                size_t allocated_size;
                size_t hash;
            };
        private:
            bool initialized;
            bool enable_thread_cache;
            uintptr_t unused;
            os::TlsSlot tls_slot;
            impl::InternalCentralHeapStorage central_heap_storage;
        public:
            StandardAllocator();
            StandardAllocator(void *mem, size_t size);
            StandardAllocator(void *mem, size_t size, bool enable_cache);

            ~StandardAllocator() {
                if (this->initialized) {
                    this->Finalize();
                }
            }

            void Initialize(void *mem, size_t size);
            void Initialize(void *mem, size_t size, bool enable_cache);
            void Finalize();

            void *Allocate(size_t size);
            void *Allocate(size_t size, size_t alignment);
            void Free(void *ptr);
            void *Reallocate(void *ptr, size_t new_size);
            size_t Shrink(void *ptr, size_t new_size);

            void ClearThreadCache() const;
            void CleanUpManagementArea() const;

            size_t GetSizeOf(const void *ptr) const;
            size_t GetTotalFreeSize() const;
            size_t GetAllocatableSize() const;

            void WalkAllocatedBlocks(WalkCallback callback, void *user_data) const;

            void Dump() const;
            AllocatorHash Hash() const;
    };

}
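A hedged usage sketch of the StandardAllocator interface declared above; the buffer size, the success conditions, and the walk-callback return convention (non-zero assumed to continue the walk) are illustrative assumptions, not taken from the commit.

#include <stratosphere.hpp>

namespace {

    u8 g_allocator_memory[1024 * 1024]; /* hypothetical backing buffer */
    ams::mem::StandardAllocator g_allocator;

}

void AllocatorExample() {
    /* Two-argument Initialize; the enable_cache overload controls per-thread caching. */
    g_allocator.Initialize(g_allocator_memory, sizeof(g_allocator_memory));

    void *p = g_allocator.Allocate(0x200, 0x20);
    if (p != nullptr) {
        AMS_ASSERT(g_allocator.GetSizeOf(p) >= 0x200);
        g_allocator.Free(p);
    }

    /* Visit live allocations; the callback signature comes from WalkCallback above. */
    g_allocator.WalkAllocatedBlocks([](void *ptr, size_t size, void *user_data) -> int {
        (void)ptr; (void)size; (void)user_data;
        return 1; /* assumed: non-zero continues the walk */
    }, nullptr);

    g_allocator.Finalize();
}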
@@ -16,21 +16,25 @@

 #pragma once

-#include "os/os_common_types.hpp"
-#include "os/os_memory_common.hpp"
-#include "os/os_tick.hpp"
-#include "os/os_managed_handle.hpp"
-#include "os/os_process_handle.hpp"
-#include "os/os_random.hpp"
-#include "os/os_mutex.hpp"
-#include "os/os_condvar.hpp"
-#include "os/os_rw_lock.hpp"
-#include "os/os_semaphore.hpp"
-#include "os/os_timeout_helper.hpp"
-#include "os/os_event.hpp"
-#include "os/os_system_event.hpp"
-#include "os/os_interrupt_event.hpp"
-#include "os/os_thread.hpp"
-#include "os/os_message_queue.hpp"
-#include "os/os_waitable_holder.hpp"
-#include "os/os_waitable_manager.hpp"
+#include <stratosphere/os/os_common_types.hpp>
+#include <stratosphere/os/os_tick.hpp>
+#include <stratosphere/os/os_memory_common.hpp>
+#include <stratosphere/os/os_memory_permission.hpp>
+#include <stratosphere/os/os_memory_heap_api.hpp>
+#include <stratosphere/os/os_memory_virtual_address_api.hpp>
+#include <stratosphere/os/os_managed_handle.hpp>
+#include <stratosphere/os/os_process_handle.hpp>
+#include <stratosphere/os/os_random.hpp>
+#include <stratosphere/os/os_mutex.hpp>
+#include <stratosphere/os/os_condvar.hpp>
+#include <stratosphere/os/os_rw_lock.hpp>
+#include <stratosphere/os/os_semaphore.hpp>
+#include <stratosphere/os/os_timeout_helper.hpp>
+#include <stratosphere/os/os_event.hpp>
+#include <stratosphere/os/os_system_event.hpp>
+#include <stratosphere/os/os_interrupt_event.hpp>
+#include <stratosphere/os/os_thread_local_storage_api.hpp>
+#include <stratosphere/os/os_thread.hpp>
+#include <stratosphere/os/os_message_queue.hpp>
+#include <stratosphere/os/os_waitable_holder.hpp>
+#include <stratosphere/os/os_waitable_manager.hpp>
@@ -22,4 +22,12 @@

     constexpr inline size_t MemoryBlockUnitSize = 0x200000;

+    enum MemoryPermission {
+        MemoryPermission_None      = (0 << 0),
+        MemoryPermission_ReadOnly  = (1 << 0),
+        MemoryPermission_WriteOnly = (1 << 1),
+
+        MemoryPermission_ReadWrite = MemoryPermission_ReadOnly | MemoryPermission_WriteOnly,
+    };
+
 }
@@ -0,0 +1,26 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <vapours.hpp>
#include <stratosphere/os/os_common_types.hpp>
#include <stratosphere/os/os_memory_common.hpp>

namespace ams::os {

    Result AllocateMemoryBlock(uintptr_t *out_address, size_t size);
    void FreeMemoryBlock(uintptr_t address, size_t size);

}
@@ -0,0 +1,25 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <vapours.hpp>
#include <stratosphere/os/os_common_types.hpp>
#include <stratosphere/os/os_memory_common.hpp>

namespace ams::os {

    void SetMemoryPermission(uintptr_t address, size_t size, MemoryPermission perm);

}
@@ -0,0 +1,25 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <vapours.hpp>
#include <stratosphere/os/os_common_types.hpp>
#include <stratosphere/os/os_memory_common.hpp>

namespace ams::os {

    bool IsVirtualAddressMemoryEnabled();

}
@@ -101,10 +101,19 @@ namespace ams::os {
         }
     };

-    NX_INLINE u32 GetCurrentThreadPriority() {
-        u32 prio;
+    ALWAYS_INLINE s32 GetCurrentThreadPriority() {
+        s32 prio;
         R_ABORT_UNLESS(svcGetThreadPriority(&prio, CUR_THREAD_HANDLE));
         return prio;
     }

+    /* TODO: ThreadManager? */
+    ALWAYS_INLINE s32 GetCurrentProcessorNumber() {
+        return svcGetCurrentProcessorNumber();
+    }
+
+    ALWAYS_INLINE s32 GetCurrentCoreNumber() {
+        return GetCurrentProcessorNumber();
+    }
+
 }
@@ -0,0 +1,31 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once
#include <stratosphere/os/os_common_types.hpp>
#include <stratosphere/os/os_memory_common.hpp>
#include <stratosphere/os/os_thread_local_storage_common.hpp>

namespace ams::os {

    Result AllocateTlsSlot(TlsSlot *out, TlsDestructor destructor);

    void FreeTlsSlot(TlsSlot slot);

    uintptr_t GetTlsValue(TlsSlot slot);
    void SetTlsValue(TlsSlot slot, uintptr_t value);

}
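A minimal sketch (not from the commit) of how the TLS slot API declared above might be used. That the destructor receives the slot's stored value when a thread exits is an assumption about semantics, not something this header states.

#include <stratosphere.hpp>

namespace {

    ams::os::TlsSlot g_counter_slot;

}

void InitializeCounterSlot() {
    R_ABORT_UNLESS(ams::os::AllocateTlsSlot(std::addressof(g_counter_slot), [](uintptr_t value) {
        delete reinterpret_cast<size_t *>(value); /* hypothetical per-thread cleanup */
    }));
}

size_t IncrementThreadLocalCounter() {
    /* Lazily create the per-thread value on first use. */
    auto *counter = reinterpret_cast<size_t *>(ams::os::GetTlsValue(g_counter_slot));
    if (counter == nullptr) {
        counter = new size_t(0);
        ams::os::SetTlsValue(g_counter_slot, reinterpret_cast<uintptr_t>(counter));
    }
    return ++(*counter);
}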
@@ -0,0 +1,32 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once
#include <stratosphere/os/os_common_types.hpp>
#include <stratosphere/os/os_memory_common.hpp>

namespace ams::os {

    struct TlsSlot {
        u32 _value;
    };

    using TlsDestructor = void (*)(uintptr_t arg);

    constexpr inline size_t TlsSlotCountMax = 16;
    constexpr inline size_t SdkTlsSlotCountMax = 16;

}
@ -0,0 +1,491 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#if defined(ATMOSPHERE_BOARD_NINTENDO_NX) && defined(ATMOSPHERE_ARCH_ARM64)
|
||||
|
||||
namespace ams::svc::aarch64::lp64 {
|
||||
|
||||
ALWAYS_INLINE Result SetHeapSize(::ams::svc::Address *out_address, ::ams::svc::Size size) {
|
||||
return ::svcSetHeapSize(reinterpret_cast<void **>(out_address), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetMemoryPermission(::ams::svc::Address address, ::ams::svc::Size size, ::ams::svc::MemoryPermission perm) {
|
||||
return ::svcSetMemoryPermission(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size, static_cast<u32>(perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetMemoryAttribute(::ams::svc::Address address, ::ams::svc::Size size, uint32_t mask, uint32_t attr) {
|
||||
return ::svcSetMemoryAttribute(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size, mask, attr);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapMemory(::ams::svc::Address dst_address, ::ams::svc::Address src_address, ::ams::svc::Size size) {
|
||||
return ::svcMapMemory(reinterpret_cast<void *>(static_cast<uintptr_t>(dst_address)), reinterpret_cast<void *>(static_cast<uintptr_t>(src_address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result UnmapMemory(::ams::svc::Address dst_address, ::ams::svc::Address src_address, ::ams::svc::Size size) {
|
||||
return ::svcUnmapMemory(reinterpret_cast<void *>(static_cast<uintptr_t>(dst_address)), reinterpret_cast<void *>(static_cast<uintptr_t>(src_address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result QueryMemory(::ams::svc::UserPointer< ::ams::svc::lp64::MemoryInfo *> out_memory_info, ::ams::svc::PageInfo *out_page_info, ::ams::svc::Address address) {
|
||||
return ::svcQueryMemory(reinterpret_cast<::MemoryInfo *>(out_memory_info.GetPointerUnsafe()), reinterpret_cast<u32 *>(out_page_info), address);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void ExitProcess() {
|
||||
return ::svcExitProcess();
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateThread(::ams::svc::Handle *out_handle, ::ams::svc::ThreadFunc func, ::ams::svc::Address arg, ::ams::svc::Address stack_bottom, int32_t priority, int32_t core_id) {
|
||||
return ::svcCreateThread(out_handle, reinterpret_cast<void *>(static_cast<uintptr_t>(func)), reinterpret_cast<void *>(static_cast<uintptr_t>(arg)), reinterpret_cast<void *>(static_cast<uintptr_t>(stack_bottom)), priority, core_id);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result StartThread(::ams::svc::Handle thread_handle) {
|
||||
return ::svcStartThread(thread_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void ExitThread() {
|
||||
return ::svcExitThread();
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void SleepThread(int64_t ns) {
|
||||
return ::svcSleepThread(ns);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetThreadPriority(int32_t *out_priority, ::ams::svc::Handle thread_handle) {
|
||||
return ::svcGetThreadPriority(out_priority, thread_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetThreadPriority(::ams::svc::Handle thread_handle, int32_t priority) {
|
||||
return ::svcSetThreadPriority(thread_handle, priority);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetThreadCoreMask(int32_t *out_core_id, uint64_t *out_affinity_mask, ::ams::svc::Handle thread_handle) {
|
||||
return ::svcGetThreadCoreMask(out_core_id, out_affinity_mask, thread_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetThreadCoreMask(::ams::svc::Handle thread_handle, int32_t core_id, uint64_t affinity_mask) {
|
||||
return ::svcSetThreadCoreMask(thread_handle, core_id, affinity_mask);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE int32_t GetCurrentProcessorNumber() {
|
||||
return ::svcGetCurrentProcessorNumber();
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SignalEvent(::ams::svc::Handle event_handle) {
|
||||
return ::svcSignalEvent(event_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ClearEvent(::ams::svc::Handle event_handle) {
|
||||
return ::svcClearEvent(event_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapSharedMemory(::ams::svc::Handle shmem_handle, ::ams::svc::Address address, ::ams::svc::Size size, ::ams::svc::MemoryPermission map_perm) {
|
||||
return ::svcMapSharedMemory(shmem_handle, reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size, static_cast<u32>(map_perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result UnmapSharedMemory(::ams::svc::Handle shmem_handle, ::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcUnmapSharedMemory(shmem_handle, reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateTransferMemory(::ams::svc::Handle *out_handle, ::ams::svc::Address address, ::ams::svc::Size size, ::ams::svc::MemoryPermission map_perm) {
|
||||
return ::svcCreateTransferMemory(out_handle, reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size, static_cast<u32>(map_perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CloseHandle(::ams::svc::Handle handle) {
|
||||
return ::svcCloseHandle(handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ResetSignal(::ams::svc::Handle handle) {
|
||||
return ::svcResetSignal(handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result WaitSynchronization(int32_t *out_index, ::ams::svc::UserPointer<const ::ams::svc::Handle *> handles, int32_t numHandles, int64_t timeout_ns) {
|
||||
return ::svcWaitSynchronization(out_index, handles.GetPointerUnsafe(), numHandles, timeout_ns);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CancelSynchronization(::ams::svc::Handle handle) {
|
||||
return ::svcCancelSynchronization(handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ArbitrateLock(::ams::svc::Handle thread_handle, ::ams::svc::Address address, uint32_t tag) {
|
||||
return ::svcArbitrateLock(thread_handle, reinterpret_cast<u32 *>(static_cast<uintptr_t>(address)), tag);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ArbitrateUnlock(::ams::svc::Address address) {
|
||||
return ::svcArbitrateUnlock(reinterpret_cast<u32 *>(static_cast<uintptr_t>(address)));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result WaitProcessWideKeyAtomic(::ams::svc::Address address, ::ams::svc::Address cv_key, uint32_t tag, int64_t timeout_ns) {
|
||||
return ::svcWaitProcessWideKeyAtomic(reinterpret_cast<u32 *>(static_cast<uintptr_t>(address)), reinterpret_cast<u32 *>(static_cast<uintptr_t>(cv_key)), tag, timeout_ns);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void SignalProcessWideKey(::ams::svc::Address cv_key, int32_t count) {
|
||||
return ::svcSignalProcessWideKey(reinterpret_cast<u32 *>(static_cast<uintptr_t>(cv_key)), count);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE int64_t GetSystemTick() {
|
||||
return ::svcGetSystemTick();
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ConnectToNamedPort(::ams::svc::Handle *out_handle, ::ams::svc::UserPointer<const char *> name) {
|
||||
return ::svcConnectToNamedPort(out_handle, name.GetPointerUnsafe());
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SendSyncRequestLight(::ams::svc::Handle session_handle) {
|
||||
return ::svcSendSyncRequestLight(session_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SendSyncRequest(::ams::svc::Handle session_handle) {
|
||||
return ::svcSendSyncRequest(session_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SendSyncRequestWithUserBuffer(::ams::svc::Address message_buffer, ::ams::svc::Size message_buffer_size, ::ams::svc::Handle session_handle) {
|
||||
return ::svcSendSyncRequestWithUserBuffer(reinterpret_cast<void *>(static_cast<uintptr_t>(message_buffer)), message_buffer_size, session_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SendAsyncRequestWithUserBuffer(::ams::svc::Handle *out_event_handle, ::ams::svc::Address message_buffer, ::ams::svc::Size message_buffer_size, ::ams::svc::Handle session_handle) {
|
||||
return ::svcSendAsyncRequestWithUserBuffer(out_event_handle, reinterpret_cast<void *>(static_cast<uintptr_t>(message_buffer)), message_buffer_size, session_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetProcessId(uint64_t *out_process_id, ::ams::svc::Handle process_handle) {
|
||||
return ::svcGetProcessId(out_process_id, process_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetThreadId(uint64_t *out_thread_id, ::ams::svc::Handle thread_handle) {
|
||||
return ::svcGetThreadId(out_thread_id, thread_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void Break(::ams::svc::BreakReason break_reason, ::ams::svc::Address arg, ::ams::svc::Size size) {
|
||||
::svcBreak(break_reason, arg, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result OutputDebugString(::ams::svc::UserPointer<const char *> debug_str, ::ams::svc::Size len) {
|
||||
return ::svcOutputDebugString(debug_str.GetPointerUnsafe(), len);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void ReturnFromException(::ams::Result result) {
|
||||
return ::svcReturnFromException(result.GetValue());
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetInfo(uint64_t *out, ::ams::svc::InfoType info_type, ::ams::svc::Handle handle, uint64_t info_subtype) {
|
||||
return ::svcGetInfo(out, static_cast<u32>(info_type), handle, info_subtype);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void FlushEntireDataCache() {
|
||||
return ::svcFlushEntireDataCache();
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result FlushDataCache(::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcFlushDataCache(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapPhysicalMemory(::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcMapPhysicalMemory(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result UnmapPhysicalMemory(::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcUnmapPhysicalMemory(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetDebugFutureThreadInfo(::ams::svc::lp64::LastThreadContext *out_context, uint64_t *thread_id, ::ams::svc::Handle debug_handle, int64_t ns) {
|
||||
return ::svcGetDebugFutureThreadInfo(reinterpret_cast<::LastThreadContext *>(out_context), thread_id, debug_handle, ns);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetLastThreadInfo(::ams::svc::lp64::LastThreadContext *out_context, ::ams::svc::Address *out_tls_address, uint32_t *out_flags) {
|
||||
return ::svcGetLastThreadInfo(reinterpret_cast<::LastThreadContext *>(out_context), reinterpret_cast<u64 *>(out_tls_address), out_flags);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetResourceLimitLimitValue(int64_t *out_limit_value, ::ams::svc::Handle resource_limit_handle, ::ams::svc::LimitableResource which) {
|
||||
return ::svcGetResourceLimitLimitValue(out_limit_value, resource_limit_handle, static_cast<::LimitableResource>(which));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetResourceLimitCurrentValue(int64_t *out_current_value, ::ams::svc::Handle resource_limit_handle, ::ams::svc::LimitableResource which) {
|
||||
return ::svcGetResourceLimitCurrentValue(out_current_value, resource_limit_handle, static_cast<::LimitableResource>(which));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetThreadActivity(::ams::svc::Handle thread_handle, ::ams::svc::ThreadActivity thread_activity) {
|
||||
return ::svcSetThreadActivity(thread_handle, static_cast<::ThreadActivity>(thread_activity));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetThreadContext3(::ams::svc::UserPointer< ::ams::svc::ThreadContext *> out_context, ::ams::svc::Handle thread_handle) {
|
||||
return ::svcGetThreadContext3(reinterpret_cast<::ThreadContext *>(out_context.GetPointerUnsafe()), thread_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result WaitForAddress(::ams::svc::Address address, ::ams::svc::ArbitrationType arb_type, int32_t value, int64_t timeout_ns) {
|
||||
return ::svcWaitForAddress(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), arb_type, value, timeout_ns);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SignalToAddress(::ams::svc::Address address, ::ams::svc::SignalType signal_type, int32_t value, int32_t count) {
|
||||
return ::svcSignalToAddress(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), signal_type, value, count);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void SynchronizePreemptionState() {
|
||||
return ::svcSynchronizePreemptionState();
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void KernelDebug(::ams::svc::KernelDebugType kern_debug_type, uint64_t arg0, uint64_t arg1, uint64_t arg2) {
|
||||
return ::svcKernelDebug(kern_debug_type, arg0, arg1, arg2);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void ChangeKernelTraceState(::ams::svc::KernelTraceState kern_trace_state) {
|
||||
return ::svcChangeKernelTraceState(kern_trace_state);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateSession(::ams::svc::Handle *out_server_session_handle, ::ams::svc::Handle *out_client_session_handle, bool is_light, ::ams::svc::Address name) {
|
||||
return ::svcCreateSession(out_server_session_handle, out_client_session_handle, is_light, name);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result AcceptSession(::ams::svc::Handle *out_handle, ::ams::svc::Handle port) {
|
||||
return ::svcAcceptSession(out_handle, port);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ReplyAndReceiveLight(::ams::svc::Handle handle) {
|
||||
return ::svcReplyAndReceiveLight(handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ReplyAndReceive(int32_t *out_index, ::ams::svc::UserPointer<const ::ams::svc::Handle *> handles, int32_t num_handles, ::ams::svc::Handle reply_target, int64_t timeout_ns) {
|
||||
return ::svcReplyAndReceive(out_index, handles.GetPointerUnsafe(), num_handles, reply_target, timeout_ns);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ReplyAndReceiveWithUserBuffer(int32_t *out_index, ::ams::svc::Address message_buffer, ::ams::svc::Size message_buffer_size, ::ams::svc::UserPointer<const ::ams::svc::Handle *> handles, int32_t num_handles, ::ams::svc::Handle reply_target, int64_t timeout_ns) {
|
||||
return ::svcReplyAndReceiveWithUserBuffer(out_index, reinterpret_cast<void *>(static_cast<uintptr_t>(message_buffer)), message_buffer_size, handles.GetPointerUnsafe(), num_handles, reply_target, timeout_ns);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateEvent(::ams::svc::Handle *out_write_handle, ::ams::svc::Handle *out_read_handle) {
|
||||
return ::svcCreateEvent(out_write_handle, out_read_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapPhysicalMemoryUnsafe(::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcMapPhysicalMemoryUnsafe(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result UnmapPhysicalMemoryUnsafe(::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcUnmapPhysicalMemoryUnsafe(reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetUnsafeLimit(::ams::svc::Size limit) {
|
||||
return ::svcSetUnsafeLimit(limit);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateCodeMemory(::ams::svc::Handle *out_handle, ::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcCreateCodeMemory(out_handle, reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ControlCodeMemory(::ams::svc::Handle code_memory_handle, ::ams::svc::CodeMemoryOperation operation, uint64_t address, uint64_t size, ::ams::svc::MemoryPermission perm) {
|
||||
return ::svcControlCodeMemory(code_memory_handle, static_cast<::CodeMapOperation>(operation), reinterpret_cast<void *>(address), size, static_cast<u32>(perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void SleepSystem() {
|
||||
return ::svcSleepSystem();
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ReadWriteRegister(uint32_t *out_value, ::ams::svc::PhysicalAddress address, uint32_t mask, uint32_t value) {
|
||||
return ::svcReadWriteRegister(out_value, address, mask, value);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetProcessActivity(::ams::svc::Handle process_handle, ::ams::svc::ProcessActivity process_activity) {
|
||||
return ::svcSetProcessActivity(process_handle, static_cast<::ProcessActivity>(process_activity));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateSharedMemory(::ams::svc::Handle *out_handle, ::ams::svc::Size size, ::ams::svc::MemoryPermission owner_perm, ::ams::svc::MemoryPermission remote_perm) {
|
||||
return ::svcCreateSharedMemory(out_handle, size, static_cast<u32>(owner_perm), static_cast<u32>(remote_perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapTransferMemory(::ams::svc::Handle trmem_handle, ::ams::svc::Address address, ::ams::svc::Size size, ::ams::svc::MemoryPermission owner_perm) {
|
||||
return ::svcMapTransferMemory(trmem_handle, reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size, static_cast<u32>(owner_perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result UnmapTransferMemory(::ams::svc::Handle trmem_handle, ::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcUnmapTransferMemory(trmem_handle, reinterpret_cast<void *>(static_cast<uintptr_t>(address)), size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateInterruptEvent(::ams::svc::Handle *out_read_handle, int32_t interrupt_id, ::ams::svc::InterruptType interrupt_type) {
|
||||
return ::svcCreateInterruptEvent(out_read_handle, interrupt_id, static_cast<u32>(interrupt_type));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result QueryPhysicalAddress(::ams::svc::lp64::PhysicalMemoryInfo *out_info, ::ams::svc::Address address) {
|
||||
return ::svcQueryPhysicalAddress(reinterpret_cast<::PhysicalMemoryInfo *>(out_info), address);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result QueryIoMapping(::ams::svc::Address *out_address, ::ams::svc::PhysicalAddress physical_address, ::ams::svc::Size size) {
|
||||
return ::svcQueryIoMapping(reinterpret_cast<u64 *>(out_address), physical_address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateDeviceAddressSpace(::ams::svc::Handle *out_handle, uint64_t das_address, uint64_t das_size) {
|
||||
return ::svcCreateDeviceAddressSpace(out_handle, das_address, das_size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result AttachDeviceAddressSpace(::ams::svc::DeviceName device_name, ::ams::svc::Handle das_handle) {
|
||||
return ::svcAttachDeviceAddressSpace(static_cast<u64>(device_name), das_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result DetachDeviceAddressSpace(::ams::svc::DeviceName device_name, ::ams::svc::Handle das_handle) {
|
||||
return ::svcDetachDeviceAddressSpace(static_cast<u64>(device_name), das_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapDeviceAddressSpaceByForce(::ams::svc::Handle das_handle, ::ams::svc::Handle process_handle, uint64_t process_address, ::ams::svc::Size size, uint64_t device_address, ::ams::svc::MemoryPermission device_perm) {
|
||||
return ::svcMapDeviceAddressSpaceByForce(das_handle, process_handle, process_address, size, device_address, static_cast<u32>(device_perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapDeviceAddressSpaceAligned(::ams::svc::Handle das_handle, ::ams::svc::Handle process_handle, uint64_t process_address, ::ams::svc::Size size, uint64_t device_address, ::ams::svc::MemoryPermission device_perm) {
|
||||
return ::svcMapDeviceAddressSpaceAligned(das_handle, process_handle, process_address, size, device_address, static_cast<u32>(device_perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapDeviceAddressSpace(::ams::svc::Size *out_mapped_size, ::ams::svc::Handle das_handle, ::ams::svc::Handle process_handle, uint64_t process_address, ::ams::svc::Size size, uint64_t device_address, ::ams::svc::MemoryPermission device_perm) {
|
||||
return ::svcMapDeviceAddressSpace(reinterpret_cast<u64 *>(out_mapped_size), das_handle, process_handle, process_address, size, device_address, static_cast<u32>(device_perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result UnmapDeviceAddressSpace(::ams::svc::Handle das_handle, ::ams::svc::Handle process_handle, uint64_t process_address, ::ams::svc::Size size, uint64_t device_address) {
|
||||
return ::svcUnmapDeviceAddressSpace(das_handle, process_handle, process_address, size, device_address);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result InvalidateProcessDataCache(::ams::svc::Handle process_handle, uint64_t address, uint64_t size) {
|
||||
return ::svcInvalidateProcessDataCache(process_handle, address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result StoreProcessDataCache(::ams::svc::Handle process_handle, uint64_t address, uint64_t size) {
|
||||
return ::svcStoreProcessDataCache(process_handle, address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result FlushProcessDataCache(::ams::svc::Handle process_handle, uint64_t address, uint64_t size) {
|
||||
return ::svcFlushProcessDataCache(process_handle, address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result DebugActiveProcess(::ams::svc::Handle *out_handle, uint64_t process_id) {
|
||||
return ::svcDebugActiveProcess(out_handle, process_id);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result BreakDebugProcess(::ams::svc::Handle debug_handle) {
|
||||
return ::svcBreakDebugProcess(debug_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result TerminateDebugProcess(::ams::svc::Handle debug_handle) {
|
||||
return ::svcTerminateDebugProcess(debug_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetDebugEvent(::ams::svc::UserPointer< ::ams::svc::lp64::DebugEventInfo *> out_info, ::ams::svc::Handle debug_handle) {
|
||||
return ::svcGetDebugEvent(out_info.GetPointerUnsafe(), debug_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ContinueDebugEvent(::ams::svc::Handle debug_handle, uint32_t flags, ::ams::svc::UserPointer<const uint64_t *> thread_ids, int32_t num_thread_ids) {
|
||||
return ::svcContinueDebugEvent(debug_handle, flags, const_cast<u64 *>(thread_ids.GetPointerUnsafe()), num_thread_ids);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetProcessList(int32_t *out_num_processes, ::ams::svc::UserPointer<uint64_t *> out_process_ids, int32_t max_out_count) {
|
||||
return ::svcGetProcessList(out_num_processes, out_process_ids.GetPointerUnsafe(), max_out_count);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetThreadList(int32_t *out_num_threads, ::ams::svc::UserPointer<uint64_t *> out_thread_ids, int32_t max_out_count, ::ams::svc::Handle debug_handle) {
|
||||
return ::svcGetThreadList(out_num_threads, out_thread_ids.GetPointerUnsafe(), max_out_count, debug_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetDebugThreadContext(::ams::svc::UserPointer< ::ams::svc::ThreadContext *> out_context, ::ams::svc::Handle debug_handle, uint64_t thread_id, uint32_t context_flags) {
|
||||
return ::svcGetDebugThreadContext(reinterpret_cast<::ThreadContext *>(out_context.GetPointerUnsafe()), debug_handle, thread_id, context_flags);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetDebugThreadContext(::ams::svc::Handle debug_handle, uint64_t thread_id, ::ams::svc::UserPointer<const ::ams::svc::ThreadContext *> context, uint32_t context_flags) {
|
||||
return ::svcSetDebugThreadContext(debug_handle, thread_id, reinterpret_cast<const ::ThreadContext *>(context.GetPointerUnsafe()), context_flags);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result QueryDebugProcessMemory(::ams::svc::UserPointer< ::ams::svc::lp64::MemoryInfo *> out_memory_info, ::ams::svc::PageInfo *out_page_info, ::ams::svc::Handle process_handle, ::ams::svc::Address address) {
|
||||
return ::svcQueryDebugProcessMemory(reinterpret_cast<::MemoryInfo *>(out_memory_info.GetPointerUnsafe()), reinterpret_cast<u32 *>(out_page_info), process_handle, address);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ReadDebugProcessMemory(::ams::svc::Address buffer, ::ams::svc::Handle debug_handle, ::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcReadDebugProcessMemory(reinterpret_cast<void *>(static_cast<uintptr_t>(buffer)), debug_handle, address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result WriteDebugProcessMemory(::ams::svc::Handle debug_handle, ::ams::svc::Address buffer, ::ams::svc::Address address, ::ams::svc::Size size) {
|
||||
return ::svcWriteDebugProcessMemory(debug_handle, reinterpret_cast<const void *>(static_cast<uintptr_t>(buffer)), address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetHardwareBreakPoint(::ams::svc::HardwareBreakPointRegisterName name, uint64_t flags, uint64_t value) {
|
||||
return ::svcSetHardwareBreakPoint(static_cast<u32>(name), flags, value);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetDebugThreadParam(uint64_t *out_64, uint32_t *out_32, ::ams::svc::Handle debug_handle, uint64_t thread_id, ::ams::svc::DebugThreadParam param) {
|
||||
return ::svcGetDebugThreadParam(out_64, out_32, debug_handle, thread_id, static_cast<::DebugThreadParam>(param));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetSystemInfo(uint64_t *out, ::ams::svc::SystemInfoType info_type, ::ams::svc::Handle handle, uint64_t info_subtype) {
|
||||
return ::svcGetSystemInfo(out, static_cast<u64>(info_type), handle, info_subtype);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreatePort(::ams::svc::Handle *out_server_handle, ::ams::svc::Handle *out_client_handle, int32_t max_sessions, bool is_light, ::ams::svc::Address name) {
|
||||
return ::svcCreatePort(out_server_handle, out_client_handle, max_sessions, is_light, reinterpret_cast<const char *>(static_cast<uintptr_t>(name)));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ManageNamedPort(::ams::svc::Handle *out_server_handle, ::ams::svc::UserPointer<const char *> name, int32_t max_sessions) {
|
||||
return ::svcManageNamedPort(out_server_handle, name.GetPointerUnsafe(), max_sessions);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result ConnectToPort(::ams::svc::Handle *out_handle, ::ams::svc::Handle port) {
|
||||
return ::svcConnectToPort(out_handle, port);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetProcessMemoryPermission(::ams::svc::Handle process_handle, uint64_t address, uint64_t size, ::ams::svc::MemoryPermission perm) {
|
||||
return ::svcSetProcessMemoryPermission(process_handle, address, size, static_cast<u32>(perm));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapProcessMemory(::ams::svc::Address dst_address, ::ams::svc::Handle process_handle, uint64_t src_address, ::ams::svc::Size size) {
|
||||
return ::svcMapProcessMemory(reinterpret_cast<void *>(static_cast<uintptr_t>(dst_address)), process_handle, src_address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result UnmapProcessMemory(::ams::svc::Address dst_address, ::ams::svc::Handle process_handle, uint64_t src_address, ::ams::svc::Size size) {
|
||||
return ::svcUnmapProcessMemory(reinterpret_cast<void *>(static_cast<uintptr_t>(dst_address)), process_handle, src_address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result QueryProcessMemory(::ams::svc::UserPointer< ::ams::svc::lp64::MemoryInfo *> out_memory_info, ::ams::svc::PageInfo *out_page_info, ::ams::svc::Handle process_handle, uint64_t address) {
|
||||
return ::svcQueryProcessMemory(reinterpret_cast<::MemoryInfo *>(out_memory_info.GetPointerUnsafe()), reinterpret_cast<u32 *>(out_page_info), process_handle, address);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result MapProcessCodeMemory(::ams::svc::Handle process_handle, uint64_t dst_address, uint64_t src_address, uint64_t size) {
|
||||
return ::svcMapProcessCodeMemory(process_handle, dst_address, src_address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result UnmapProcessCodeMemory(::ams::svc::Handle process_handle, uint64_t dst_address, uint64_t src_address, uint64_t size) {
|
||||
return ::svcUnmapProcessCodeMemory(process_handle, dst_address, src_address, size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateProcess(::ams::svc::Handle *out_handle, ::ams::svc::UserPointer<const ::ams::svc::lp64::CreateProcessParameter *> parameters, ::ams::svc::UserPointer<const uint32_t *> caps, int32_t num_caps) {
|
||||
return ::svcCreateProcess(out_handle, parameters.GetPointerUnsafe(), caps.GetPointerUnsafe(), num_caps);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result StartProcess(::ams::svc::Handle process_handle, int32_t priority, int32_t core_id, uint64_t main_thread_stack_size) {
|
||||
return ::svcStartProcess(process_handle, priority, core_id, main_thread_stack_size);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result TerminateProcess(::ams::svc::Handle process_handle) {
|
||||
return ::svcTerminateProcess(process_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result GetProcessInfo(int64_t *out_info, ::ams::svc::Handle process_handle, ::ams::svc::ProcessInfoType info_type) {
|
||||
return ::svcGetProcessInfo(out_info, process_handle, static_cast<::ProcessInfoType>(info_type));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result CreateResourceLimit(::ams::svc::Handle *out_handle) {
|
||||
return ::svcCreateResourceLimit(out_handle);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Result SetResourceLimitLimitValue(::ams::svc::Handle resource_limit_handle, ::ams::svc::LimitableResource which, int64_t limit_value) {
|
||||
return ::svcSetResourceLimitLimitValue(resource_limit_handle, static_cast<::LimitableResource>(which), limit_value);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void CallSecureMonitor(::ams::svc::lp64::SecureMonitorArguments *args) {
|
||||
::svcCallSecureMonitor(reinterpret_cast<::SecmonArgs *>(args));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif
|
|
@@ -41,7 +41,7 @@ namespace ams::diag {
         __builtin_unreachable();
     }

-    ALWAYS_INLINE void DebugLog(const char *format, ...) __attribute__((format(printf, 1, 2)));
+    inline void DebugLog(const char *format, ...) __attribute__((format(printf, 1, 2)));

 #ifdef AMS_ENABLE_DEBUG_PRINT
     os::Mutex g_debug_log_lock;

@@ -55,7 +55,7 @@ namespace ams::diag {
         svc::OutputDebugString(g_debug_buffer, strlen(g_debug_buffer));
     }

-    void DebugLog(const char *format, ...) __attribute__((format(printf, 1, 2))) {
+    void DebugLog(const char *format, ...) {
         ::std::va_list vl;
         va_start(vl, format);
         DebugLogImpl(format, vl);
@@ -0,0 +1,116 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <stratosphere.hpp>
#include "mem_impl_heap_platform.hpp"
#include "mem_impl_heap_tls_heap_static.hpp"
#include "mem_impl_heap_tls_heap_central.hpp"

namespace ams::mem::impl::heap {

    void *CachedHeap::Allocate(size_t n) {
        return this->tls_heap_cache->Allocate(n);
    }

    void *CachedHeap::Allocate(size_t n, size_t align) {
        return this->tls_heap_cache->Allocate(n, align);
    }

    size_t CachedHeap::GetAllocationSize(const void *ptr) {
        return this->tls_heap_cache->GetAllocationSize(ptr);
    }

    errno_t CachedHeap::Free(void *p) {
        return this->tls_heap_cache->Free(p);
    }

    errno_t CachedHeap::FreeWithSize(void *p, size_t size) {
        return this->tls_heap_cache->FreeWithSize(p, size);
    }

    errno_t CachedHeap::Reallocate(void *ptr, size_t size, void **p) {
        return this->tls_heap_cache->Reallocate(ptr, size, p);
    }

    errno_t CachedHeap::Shrink(void *ptr, size_t size) {
        return this->tls_heap_cache->Shrink(ptr, size);
    }

    void CachedHeap::ReleaseAllCache() {
        if (this->tls_heap_cache) {
            this->tls_heap_cache->ReleaseAllCache();
        }
    }

    void CachedHeap::Finalize() {
        if (this->tls_heap_cache) {
            this->tls_heap_cache->Finalize();
            this->tls_heap_cache = nullptr;
        }
    }

    bool CachedHeap::CheckCache() {
        bool cache = false;
        auto err = this->Query(AllocQuery_CheckCache, std::addressof(cache));
        AMS_ASSERT(err == 0);
        return cache;
    }

    errno_t CachedHeap::QueryV(int _query, std::va_list vl) {
        const AllocQuery query = static_cast<AllocQuery>(_query);
        switch (query) {
            case AllocQuery_CheckCache:
            {
                bool *out = va_arg(vl, bool *);
                if (out) {
                    *out = (this->tls_heap_cache == nullptr) || this->tls_heap_cache->CheckCache();
                }
                return 0;
            }
            case AllocQuery_ClearCache:
            {
                this->ReleaseAllCache();
                return 0;
            }
            case AllocQuery_FinalizeCache:
            {
                this->Finalize();
                return 0;
            }
            default:
                return EINVAL;
        }
    }

    errno_t CachedHeap::Query(int query, ...) {
        std::va_list vl;
        va_start(vl, query);
        auto err = this->QueryV(query, vl);
        va_end(vl);
        return err;
    }

    void CachedHeap::Reset(TlsHeapCache *thc) {
        this->Finalize();
        this->tls_heap_cache = thc;
    }

    TlsHeapCache *CachedHeap::Release() {
        TlsHeapCache *ret = this->tls_heap_cache;
        this->tls_heap_cache = nullptr;
        return ret;
    }

}
|
|
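CachedHeap is a thin front end over a single TlsHeapCache pointer, and its maintenance operations all funnel through the varargs Query/QueryV pair. A minimal standalone sketch of that dispatch shape, with a hypothetical Cache type and query ids rather than the mem::impl definitions:

    #include <cstdarg>
    #include <cerrno>

    // Sketch only: Query_CheckCache/Query_ClearCache and CachedFrontEnd are illustrative
    // stand-ins for the AllocQuery values and the CachedHeap class above.
    namespace sketch {

        enum Query { Query_CheckCache = 1, Query_ClearCache = 2 };

        struct Cache {
            bool dirty = false;
            void Clear() { this->dirty = false; }
        };

        struct CachedFrontEnd {
            Cache *cache = nullptr;

            int QueryV(int query, std::va_list vl) {
                switch (query) {
                    case Query_CheckCache:
                        if (bool *out = va_arg(vl, bool *); out != nullptr) {
                            *out = (this->cache == nullptr) || !this->cache->dirty;
                        }
                        return 0;
                    case Query_ClearCache:
                        if (this->cache != nullptr) { this->cache->Clear(); }
                        return 0;
                    default:
                        return EINVAL;
                }
            }

            int Query(int query, ...) {
                std::va_list vl;
                va_start(vl, query);
                const int err = this->QueryV(query, vl);
                va_end(vl);
                return err;
            }
        };

    }

    // Usage sketch: bool clean = false; sketch::CachedFrontEnd f; f.Query(sketch::Query_CheckCache, &clean);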
@@ -0,0 +1,409 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
#include "mem_impl_heap_tls_heap_central.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
errno_t CentralHeap::Initialize(void *start, size_t size, u32 option) {
|
||||
/* Validate size. */
|
||||
if (size == 0 || !util::IsAligned(size, PageSize)) {
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
/* Don't allow initializing twice. */
|
||||
if (this->start) {
|
||||
return EEXIST;
|
||||
}
|
||||
|
||||
if (start) {
|
||||
/* We were provided with a region to use as backing memory. */
|
||||
u8 *aligned_start = reinterpret_cast<u8 *>(util::AlignUp(reinterpret_cast<uintptr_t>(start), PageSize));
|
||||
u8 *aligned_end = reinterpret_cast<u8 *>(util::AlignDown(reinterpret_cast<uintptr_t>(start) + size, PageSize));
|
||||
if (aligned_start >= aligned_end) {
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
this->start = aligned_start;
|
||||
this->end = aligned_end;
|
||||
this->option = option;
|
||||
this->tls_heap_central = new (this->start) TlsHeapCentral;
|
||||
if (auto err = this->tls_heap_central->Initialize(this->start, this->end - this->start, false); err != 0) {
|
||||
this->tls_heap_central->~TlsHeapCentral();
|
||||
this->tls_heap_central = nullptr;
|
||||
AMS_ASSERT(err == 0);
|
||||
return err;
|
||||
}
|
||||
this->use_virtual_memory = false;
|
||||
} else {
|
||||
/* We were not provided with a region to use as backing. */
|
||||
void *mem;
|
||||
if (auto err = AllocateVirtualMemory(std::addressof(mem), size); err != 0) {
|
||||
return err;
|
||||
}
|
||||
if (!util::IsAligned(reinterpret_cast<uintptr_t>(mem), PageSize)) {
|
||||
FreeVirtualMemory(mem, size);
|
||||
size += PageSize;
|
||||
if (auto err = AllocateVirtualMemory(std::addressof(mem), size); err != 0) {
|
||||
return err;
|
||||
}
|
||||
}
|
||||
this->start = static_cast<u8 *>(mem);
|
||||
this->end = this->start + size;
|
||||
this->option = option;
|
||||
void *central = reinterpret_cast<void *>(util::AlignUp(reinterpret_cast<uintptr_t>(mem), PageSize));
|
||||
if (auto err = AllocatePhysicalMemory(central, sizeof(TlsHeapCentral)); err != 0) {
|
||||
return err;
|
||||
}
|
||||
this->tls_heap_central = new (central) TlsHeapCentral;
|
||||
if (auto err = this->tls_heap_central->Initialize(central, size, true); err != 0) {
|
||||
this->tls_heap_central->~TlsHeapCentral();
|
||||
this->tls_heap_central = nullptr;
|
||||
AMS_ASSERT(err == 0);
|
||||
return err;
|
||||
}
|
||||
this->use_virtual_memory = true;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
void CentralHeap::Finalize() {
|
||||
if (this->tls_heap_central) {
|
||||
this->tls_heap_central->~TlsHeapCentral();
|
||||
}
|
||||
if (this->use_virtual_memory) {
|
||||
mem::impl::physical_free(util::AlignUp(static_cast<void *>(this->start), PageSize), this->end - this->start);
|
||||
mem::impl::virtual_free(this->start, this->end - this->start);
|
||||
}
|
||||
this->tls_heap_central = nullptr;
|
||||
this->use_virtual_memory = false;
|
||||
this->option = 0;
|
||||
this->start = nullptr;
|
||||
this->end = nullptr;
|
||||
}
|
||||
|
||||
void *CentralHeap::Allocate(size_t n, size_t align) {
|
||||
if (!util::IsPowerOfTwo(align)) {
|
||||
return nullptr;
|
||||
}
|
||||
if (n > MaxSize) {
|
||||
return nullptr;
|
||||
}
|
||||
if (align > PageSize) {
|
||||
return this->tls_heap_central->CacheLargeMemoryWithBigAlign(util::AlignUp(n, PageSize), align);
|
||||
}
|
||||
|
||||
const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(n, align), align);
|
||||
const auto cls = TlsHeapStatic::GetClassFromSize(real_size);
|
||||
if (!cls) {
|
||||
return this->tls_heap_central->CacheLargeMemory(real_size);
|
||||
}
|
||||
if (real_size == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
return this->tls_heap_central->CacheSmallMemory(cls, align);
|
||||
}
|
||||
|
||||
size_t CentralHeap::GetAllocationSize(const void *ptr) {
|
||||
const auto cls = this->tls_heap_central->GetClassFromPointer(ptr);
|
||||
if (cls > 0) {
|
||||
/* Check that the pointer has alignment from our allocator. */
|
||||
if (!util::IsAligned(reinterpret_cast<uintptr_t>(ptr), MinimumAlignment)) {
|
||||
return 0;
|
||||
}
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
return TlsHeapStatic::GetChunkSize(cls);
|
||||
} else if (ptr != nullptr) {
|
||||
return this->tls_heap_central->GetAllocationSize(ptr);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t CentralHeap::Free(void *ptr) {
|
||||
/* Allow Free(nullptr) */
|
||||
if (ptr == nullptr) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Check that the pointer has alignment from out allocator. */
|
||||
if (!util::IsAligned(reinterpret_cast<uintptr_t>(ptr), MinimumAlignment)) {
|
||||
AMS_ASSERT(util::IsAligned(reinterpret_cast<uintptr_t>(ptr), MinimumAlignment));
|
||||
return EFAULT;
|
||||
}
|
||||
|
||||
const auto cls = this->tls_heap_central->GetClassFromPointer(ptr);
|
||||
if (cls >= 0) {
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
if (cls) {
|
||||
return this->tls_heap_central->UncacheSmallMemory(ptr);
|
||||
} else {
|
||||
return this->tls_heap_central->UncacheLargeMemory(ptr);
|
||||
}
|
||||
} else {
|
||||
AMS_ASSERT(cls >= 0);
|
||||
return EFAULT;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t CentralHeap::FreeWithSize(void *ptr, size_t size) {
|
||||
if (TlsHeapStatic::GetClassFromSize(size)) {
|
||||
return this->tls_heap_central->UncacheSmallMemory(ptr);
|
||||
} else {
|
||||
return this->tls_heap_central->UncacheLargeMemory(ptr);
|
||||
}
|
||||
}
|
||||
|
||||
errno_t CentralHeap::Reallocate(void *ptr, size_t size, void **p) {
|
||||
AMS_ASSERT(ptr != nullptr && size != 0);
|
||||
if (!size) {
|
||||
return EINVAL;
|
||||
}
|
||||
if (size > MaxSize) {
|
||||
return ENOMEM;
|
||||
}
|
||||
|
||||
const auto cls_from_size = TlsHeapStatic::GetClassFromSize(size);
|
||||
const auto cls_from_ptr = this->tls_heap_central->GetClassFromPointer(ptr);
|
||||
if (cls_from_ptr) {
|
||||
if (cls_from_ptr <= 0) {
|
||||
return EFAULT;
|
||||
} else if (cls_from_size && cls_from_size <= cls_from_ptr) {
|
||||
*p = ptr;
|
||||
return 0;
|
||||
} else {
|
||||
const size_t old_chunk_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
*p = this->Allocate(size);
if (*p) {
std::memcpy(*p, ptr, std::min(size, old_chunk_size));
|
||||
return this->tls_heap_central->UncacheSmallMemory(ptr);
|
||||
} else {
|
||||
return ENOMEM;
|
||||
}
|
||||
}
|
||||
} else if (cls_from_size) {
|
||||
*p = this->Allocate(size);
|
||||
if (*p) {
|
||||
std::memcpy(*p, ptr, size);
|
||||
return this->tls_heap_central->UncacheLargeMemory(ptr);
|
||||
} else {
|
||||
return ENOMEM;
|
||||
}
|
||||
} else {
|
||||
return this->tls_heap_central->ReallocateLargeMemory(ptr, size, p);
|
||||
}
|
||||
}
|
||||
|
||||
errno_t CentralHeap::Shrink(void *ptr, size_t size) {
|
||||
AMS_ASSERT(ptr != nullptr && size != 0);
|
||||
if (!size) {
|
||||
return EINVAL;
|
||||
}
|
||||
if (size > MaxSize) {
|
||||
return ENOMEM;
|
||||
}
|
||||
|
||||
const auto cls_from_size = TlsHeapStatic::GetClassFromSize(size);
|
||||
const auto cls_from_ptr = this->tls_heap_central->GetClassFromPointer(ptr);
|
||||
if (cls_from_ptr) {
|
||||
if (cls_from_ptr <= 0) {
|
||||
return EFAULT;
|
||||
} else if (cls_from_size && cls_from_size <= cls_from_ptr) {
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
} else if (cls_from_size) {
|
||||
return this->tls_heap_central->ShrinkLargeMemory(ptr, PageSize);
|
||||
} else {
|
||||
return this->tls_heap_central->ShrinkLargeMemory(ptr, size);
|
||||
}
|
||||
}
|
||||
|
||||
bool CentralHeap::MakeCache(CachedHeap *cached_heap) {
|
||||
if (cached_heap == nullptr) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AMS_ASSERT(this->tls_heap_central != nullptr);
|
||||
const auto cls = TlsHeapStatic::GetClassFromSize(sizeof(*cached_heap));
|
||||
void *tls_heap_cache = this->tls_heap_central->CacheSmallMemoryForSystem(cls);
|
||||
if (tls_heap_cache == nullptr) {
|
||||
return false;
|
||||
}
|
||||
|
||||
new (tls_heap_cache) TlsHeapCache(this->tls_heap_central, this->option);
|
||||
if (this->tls_heap_central->AddThreadCache(reinterpret_cast<TlsHeapCache *>(tls_heap_cache)) != 0) {
|
||||
this->tls_heap_central->UncacheSmallMemory(tls_heap_cache);
|
||||
return false;
|
||||
}
|
||||
|
||||
cached_heap->Reset(reinterpret_cast<TlsHeapCache *>(tls_heap_cache));
|
||||
return true;
|
||||
}
|
||||
|
||||
errno_t CentralHeap::WalkAllocatedPointers(HeapWalkCallback callback, void *user_data) {
|
||||
if (!callback || !this->tls_heap_central) {
|
||||
return EINVAL;
|
||||
}
|
||||
return this->tls_heap_central->WalkAllocatedPointers(callback, user_data);
|
||||
}
|
||||
|
||||
errno_t CentralHeap::QueryV(int query, std::va_list vl) {
|
||||
return this->QueryVImpl(query, std::addressof(vl));
|
||||
}
|
||||
|
||||
errno_t CentralHeap::Query(int query, ...) {
|
||||
std::va_list vl;
|
||||
va_start(vl, query);
|
||||
auto err = this->QueryVImpl(query, std::addressof(vl));
|
||||
va_end(vl);
|
||||
return err;
|
||||
}
|
||||
|
||||
errno_t CentralHeap::QueryVImpl(int _query, std::va_list *vl_ptr) {
|
||||
const AllocQuery query = static_cast<AllocQuery>(_query);
|
||||
switch (query) {
|
||||
case AllocQuery_Dump:
|
||||
case AllocQuery_DumpJson:
|
||||
{
|
||||
auto dump_mode = static_cast<DumpMode>(va_arg(*vl_ptr, int));
|
||||
auto fd = va_arg(*vl_ptr, int);
|
||||
if (this->tls_heap_central) {
|
||||
this->tls_heap_central->Dump(dump_mode, fd, query == AllocQuery_DumpJson);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
case AllocQuery_PageSize:
|
||||
{
|
||||
size_t *out = va_arg(*vl_ptr, size_t *);
|
||||
if (out) {
|
||||
*out = PageSize;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
case AllocQuery_AllocatedSize:
|
||||
case AllocQuery_FreeSize:
|
||||
case AllocQuery_SystemSize:
|
||||
case AllocQuery_MaxAllocatableSize:
|
||||
{
|
||||
size_t *out = va_arg(*vl_ptr, size_t *);
|
||||
if (!out) {
|
||||
return 0;
|
||||
}
|
||||
if (!this->tls_heap_central) {
|
||||
*out = 0;
|
||||
return 0;
|
||||
}
|
||||
TlsHeapMemStats stats;
|
||||
this->tls_heap_central->GetMemStats(std::addressof(stats));
|
||||
switch (query) {
|
||||
case AllocQuery_AllocatedSize:
|
||||
default:
|
||||
*out = stats.allocated_size;
|
||||
break;
|
||||
case AllocQuery_FreeSize:
|
||||
*out = stats.free_size;
|
||||
break;
|
||||
case AllocQuery_SystemSize:
|
||||
*out = stats.system_size;
|
||||
break;
|
||||
case AllocQuery_MaxAllocatableSize:
|
||||
*out = stats.max_allocatable_size;
|
||||
break;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
case AllocQuery_IsClean:
|
||||
{
|
||||
int *out = va_arg(*vl_ptr, int *);
|
||||
if (out) {
|
||||
*out = !this->tls_heap_central || this->tls_heap_central->IsClean();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
case AllocQuery_HeapHash:
|
||||
{
|
||||
HeapHash *out = va_arg(*vl_ptr, HeapHash *);
|
||||
if (out) {
|
||||
if (this->tls_heap_central) {
|
||||
this->tls_heap_central->CalculateHeapHash(out);
|
||||
} else {
|
||||
*out = {};
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
case AllocQuery_UnifyFreeList:
|
||||
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
|
||||
this->tls_heap_central->IsClean();
|
||||
return 0;
|
||||
case AllocQuery_SetColor:
|
||||
{
|
||||
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
|
||||
void *ptr = va_arg(*vl_ptr, void *);
|
||||
int color = va_arg(*vl_ptr, int);
|
||||
return this->tls_heap_central->SetColor(ptr, color);
|
||||
}
|
||||
case AllocQuery_GetColor:
|
||||
{
|
||||
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
|
||||
void *ptr = va_arg(*vl_ptr, void *);
|
||||
int *out = va_arg(*vl_ptr, int *);
|
||||
return this->tls_heap_central->GetColor(ptr, out);
|
||||
}
|
||||
case AllocQuery_SetName:
|
||||
{
|
||||
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
|
||||
void *ptr = va_arg(*vl_ptr, void *);
|
||||
const char *name = va_arg(*vl_ptr, const char *);
|
||||
return this->tls_heap_central->SetName(ptr, name);
|
||||
}
|
||||
case AllocQuery_GetName:
|
||||
{
|
||||
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
|
||||
void *ptr = va_arg(*vl_ptr, void *);
|
||||
char *dst = va_arg(*vl_ptr, char *);
|
||||
size_t dst_size = va_arg(*vl_ptr, size_t);
|
||||
return this->tls_heap_central->GetName(ptr, dst, dst_size);
|
||||
}
|
||||
case AllocQuery_FreeSizeMapped:
|
||||
case AllocQuery_MaxAllocatableSizeMapped:
|
||||
{
|
||||
/* NOTE: Nintendo does not check that the ptr is not null for this query, even though they do for other queries. */
|
||||
size_t *out = va_arg(*vl_ptr, size_t *);
|
||||
size_t free_size;
|
||||
size_t max_allocatable_size;
|
||||
auto err = this->tls_heap_central->GetMappedMemStats(std::addressof(free_size), std::addressof(max_allocatable_size));
|
||||
if (err == 0) {
|
||||
if (query == AllocQuery_FreeSizeMapped) {
|
||||
*out = free_size;
|
||||
} else {
|
||||
*out = max_allocatable_size;
|
||||
}
|
||||
}
|
||||
return err;
|
||||
}
|
||||
default:
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
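CentralHeap::Allocate folds the alignment into the request, asks the static size-class table for a class, and only falls back to page-granular large allocation when no class fits or the alignment exceeds a page. A standalone sketch of that decision under an invented geometric class table; TlsHeapStatic's real table is not reproduced here:

    #include <cstddef>

    // Sketch only: ClassSizes, GetClassFromSize and ChooseBucket are illustrative stand-ins
    // for the TlsHeapStatic helpers used above.
    namespace sketch {

        constexpr size_t PageSize = 4096;
        constexpr size_t ClassSizes[] = { 16, 32, 48, 64, 96, 128, 192, 256, 384, 512, 768, 1024, 2048, 4096 };
        constexpr size_t NumClasses = sizeof(ClassSizes) / sizeof(ClassSizes[0]);

        constexpr size_t AlignUp(size_t v, size_t a) { return (v + a - 1) & ~(a - 1); }

        // Returns a 1-based class index, or 0 if the request must go to the large-allocation path.
        constexpr size_t GetClassFromSize(size_t size) {
            for (size_t i = 0; i < NumClasses; ++i) {
                if (size <= ClassSizes[i]) {
                    return i + 1;
                }
            }
            return 0;
        }

        // Mirrors the shape of Allocate(n, align): align-adjust, classify, fall back to large.
        constexpr size_t ChooseBucket(size_t n, size_t align) {
            const size_t real_size = AlignUp(n, align);
            return (align <= PageSize) ? GetClassFromSize(real_size) : 0;
        }

        static_assert(ChooseBucket(20, 8) == 2);     /* 24 bytes lands in the 32-byte class. */
        static_assert(ChooseBucket(8192, 16) == 0);  /* too big for any class: large path. */

    }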
@@ -0,0 +1,40 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <stratosphere.hpp>
#include "../mem_impl_platform.hpp"

namespace ams::mem::impl::heap {

    using Prot = mem::impl::Prot;

    inline errno_t AllocateVirtualMemory(void **ptr, size_t size) {
        return ::ams::mem::impl::virtual_alloc(ptr, size);
    }

    inline errno_t FreeVirtualMemory(void *ptr, size_t size) {
        return ::ams::mem::impl::virtual_free(ptr, size);
    }

    inline errno_t AllocatePhysicalMemory(void *ptr, size_t size) {
        return ::ams::mem::impl::physical_alloc(ptr, size, static_cast<Prot>(Prot_read | Prot_write));
    }

    inline errno_t FreePhysicalMemory(void *ptr, size_t size) {
        return ::ams::mem::impl::physical_free(ptr, size);
    }

}
|
|
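The wrappers above separate reserving address space (virtual_alloc) from committing backing pages (physical_alloc), which is what lets CentralHeap::Initialize map only the TlsHeapCentral header up front and commit the rest on demand. A hedged analogy using POSIX calls instead of the mem::impl platform layer: reserve with PROT_NONE, then commit by changing protections.

    #include <sys/mman.h>
    #include <cstddef>

    // Analogy only, not the Horizon implementation: reserve address space with no backing,
    // then commit (make accessible) just the pages that are actually needed.
    namespace sketch {

        inline void *ReserveVirtual(size_t size) {
            void *mem = ::mmap(nullptr, size, PROT_NONE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
            return (mem == MAP_FAILED) ? nullptr : mem;
        }

        inline bool CommitPhysical(void *ptr, size_t size) {
            /* Equivalent in spirit to physical_alloc(ptr, size, Prot_read | Prot_write). */
            return ::mprotect(ptr, size, PROT_READ | PROT_WRITE) == 0;
        }

        inline void ReleaseVirtual(void *ptr, size_t size) {
            ::munmap(ptr, size);
        }

    }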
@@ -0,0 +1,557 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
#include "mem_impl_heap_tls_heap_cache.hpp"
|
||||
#include "mem_impl_heap_tls_heap_central.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
TlsHeapCache::TlsHeapCache(TlsHeapCentral *central, u32 option) {
|
||||
/* Choose function impls based on option. */
|
||||
if ((option & HeapOption_DisableCache) != 0) {
|
||||
this->allocate = AllocateImpl<false>;
|
||||
this->allocate_aligned = AllocateAlignedImpl<false>;
|
||||
this->free = FreeImpl<false>;
|
||||
this->free_with_size = FreeWithSizeImpl<false>;
|
||||
this->get_allocation_size = GetAllocationSizeImpl<false>;
|
||||
this->reallocate = ReallocateImpl<false>;
|
||||
this->shrink = ShrinkImpl<false>;
|
||||
} else {
|
||||
this->allocate = AllocateImpl<true>;
|
||||
this->allocate_aligned = AllocateAlignedImpl<true>;
|
||||
this->free = FreeImpl<true>;
|
||||
this->free_with_size = FreeWithSizeImpl<true>;
|
||||
this->get_allocation_size = GetAllocationSizeImpl<true>;
|
||||
this->reallocate = ReallocateImpl<true>;
|
||||
this->shrink = ShrinkImpl<true>;
|
||||
}
|
||||
|
||||
/* Generate random bytes to mangle pointers. */
|
||||
if (auto err = gen_random(std::addressof(this->mangle_val), sizeof(this->mangle_val)); err != 0) {
|
||||
s64 epoch_time;
|
||||
epochtime(std::addressof(epoch_time));
|
||||
this->mangle_val = reinterpret_cast<uintptr_t>(std::addressof(epoch_time)) ^ static_cast<u64>(epoch_time);
|
||||
}
|
||||
|
||||
/* Set member variables. */
|
||||
this->central = central;
|
||||
this->total_heap_size = central->GetTotalHeapSize();
|
||||
this->heap_option = option;
|
||||
this->total_cached_size = 0;
|
||||
this->largest_class = 0;
|
||||
|
||||
/* Setup chunks. */
|
||||
for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
|
||||
this->small_mem_lists[i] = nullptr;
|
||||
this->cached_size[i] = 0;
|
||||
this->chunk_count[i] = 1;
|
||||
}
|
||||
|
||||
/* Set fixed chunk counts for particularly small chunks. */
|
||||
this->chunk_count[1] = MaxChunkCount;
|
||||
this->chunk_count[2] = MaxChunkCount;
|
||||
this->chunk_count[3] = MaxChunkCount;
|
||||
this->chunk_count[4] = MaxChunkCount / 2;
|
||||
this->chunk_count[5] = MaxChunkCount / 2;
|
||||
this->chunk_count[6] = MaxChunkCount / 2;
|
||||
this->chunk_count[7] = MaxChunkCount / 4;
|
||||
this->chunk_count[8] = MaxChunkCount / 4;
|
||||
this->chunk_count[9] = MaxChunkCount / 4;
|
||||
}
|
||||
|
||||
void TlsHeapCache::Finalize() {
|
||||
/* Free all small mem lists. */
|
||||
this->ReleaseAllCache();
|
||||
|
||||
/* Remove this cache from the owner central heap. */
|
||||
this->central->RemoveThreadCache(this);
|
||||
this->central->UncacheSmallMemory(this);
|
||||
}
|
||||
|
||||
bool TlsHeapCache::CheckCache() const {
|
||||
for (size_t i = 0; i < util::size(this->small_mem_lists); i++) {
|
||||
void *ptr = this->small_mem_lists[i];
|
||||
if (ptr) {
|
||||
s64 depth = -static_cast<s64>(this->cached_size[i] / TlsHeapStatic::GetChunkSize(i));
|
||||
while (ptr) {
|
||||
ptr = *reinterpret_cast<void **>(this->ManglePointer(ptr));
|
||||
if ((++depth) == 0) {
|
||||
AMS_ASSERT(ptr == nullptr);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void TlsHeapCache::ReleaseAllCache() {
|
||||
for (size_t i = 0; i < util::size(this->small_mem_lists); i++) {
|
||||
if (this->small_mem_lists[i]) {
|
||||
this->central->UncacheSmallMemoryList(this, this->small_mem_lists[i]);
|
||||
this->small_mem_lists[i] = nullptr;
|
||||
this->cached_size[i] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
this->total_cached_size = 0;
|
||||
this->largest_class = 0;
|
||||
}
|
||||
|
||||
template<>
|
||||
void *TlsHeapCache::AllocateImpl<false>(TlsHeapCache *_this, size_t size) {
|
||||
/* Validate allocation size. */
|
||||
if (size == 0 || size > MaxSize) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (const size_t cls = TlsHeapStatic::GetClassFromSize(size); cls != 0) {
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
return _this->central->CacheSmallMemory(cls);
|
||||
} else {
|
||||
/* If allocating a huge size, release our cache. */
|
||||
if (size >= _this->total_heap_size / 4) {
|
||||
_this->ReleaseAllCache();
|
||||
}
|
||||
return _this->central->CacheLargeMemory(size);
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
void *TlsHeapCache::AllocateImpl<true>(TlsHeapCache *_this, size_t size) {
|
||||
/* Validate allocation size. */
|
||||
if (size == 0 || size > MaxSize) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (size_t cls = TlsHeapStatic::GetClassFromSize(size); cls != 0) {
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
/* Allocate a chunk. */
|
||||
void *ptr = _this->small_mem_lists[cls];
|
||||
if (ptr == nullptr) {
|
||||
const size_t prev_cls = cls;
|
||||
size_t count = _this->chunk_count[cls];
|
||||
|
||||
size_t n = _this->central->CacheSmallMemoryList(_this, std::addressof(cls), count, std::addressof(ptr));
|
||||
if (n == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (cls == prev_cls) {
|
||||
if (count < MaxChunkCount) {
|
||||
count++;
|
||||
}
|
||||
_this->chunk_count[cls] = std::max(count, n);
|
||||
} else {
|
||||
AMS_ASSERT(n == 1);
|
||||
}
|
||||
|
||||
const size_t csize = TlsHeapStatic::GetChunkSize(cls) * (n - 1);
|
||||
_this->cached_size[cls] += csize;
|
||||
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
|
||||
_this->largest_class = cls;
|
||||
}
|
||||
_this->total_cached_size += csize;
|
||||
}
|
||||
|
||||
/* Demangle our pointer, update free list. */
|
||||
ptr = _this->ManglePointer(ptr);
|
||||
_this->small_mem_lists[cls] = *reinterpret_cast<void **>(ptr);
|
||||
|
||||
return ptr;
|
||||
} else {
|
||||
/* If allocating a huge size, release our cache. */
|
||||
if (size >= _this->total_heap_size / 4) {
|
||||
_this->ReleaseAllCache();
|
||||
}
|
||||
return _this->central->CacheLargeMemory(size);
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
void *TlsHeapCache::AllocateAlignedImpl<false>(TlsHeapCache *_this, size_t size, size_t align) {
|
||||
/* Ensure valid alignment. */
|
||||
if (!util::IsPowerOfTwo(align)) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
/* NOTE: Nintendo does not check size == 0 here, despite doing so in Alloc */
|
||||
if (size > MaxSize) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
/* Handle big alignment. */
|
||||
if (align > TlsHeapStatic::PageSize) {
|
||||
return _this->central->CacheLargeMemoryWithBigAlign(util::AlignUp(size, TlsHeapStatic::PageSize), align);
|
||||
}
|
||||
|
||||
const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(size, align), align);
|
||||
|
||||
if (const size_t cls = TlsHeapStatic::GetClassFromSize(real_size); cls != 0) {
|
||||
if (real_size == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
return _this->central->CacheSmallMemory(cls, align);
|
||||
} else {
|
||||
/* If allocating a huge size, release our cache. */
|
||||
if (real_size >= _this->total_heap_size / 4) {
|
||||
_this->ReleaseAllCache();
|
||||
}
|
||||
return _this->central->CacheLargeMemory(real_size);
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
void *TlsHeapCache::AllocateAlignedImpl<true>(TlsHeapCache *_this, size_t size, size_t align) {
|
||||
/* Ensure valid alignment. */
|
||||
if (!util::IsPowerOfTwo(align)) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
/* NOTE: Nintendo does not check size == 0 here, despite doing so in Alloc */
|
||||
if (size > MaxSize) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
/* Handle big alignment. */
|
||||
if (align > TlsHeapStatic::PageSize) {
|
||||
return _this->central->CacheLargeMemoryWithBigAlign(util::AlignUp(size, TlsHeapStatic::PageSize), align);
|
||||
}
|
||||
|
||||
const size_t real_size = TlsHeapStatic::GetRealSizeFromSizeAndAlignment(util::AlignUp(size, align), align);
|
||||
|
||||
if (size_t cls = TlsHeapStatic::GetClassFromSize(real_size); cls != 0) {
|
||||
if (real_size == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
|
||||
/* Allocate a chunk. */
|
||||
void *ptr = _this->small_mem_lists[cls];
|
||||
if (ptr == nullptr) {
|
||||
const size_t prev_cls = cls;
|
||||
size_t count = _this->chunk_count[cls];
|
||||
|
||||
size_t n = _this->central->CacheSmallMemoryList(_this, std::addressof(cls), count, std::addressof(ptr), align);
|
||||
if (n == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (cls == prev_cls) {
|
||||
if (count < MaxChunkCount) {
|
||||
count++;
|
||||
}
|
||||
_this->chunk_count[cls] = std::max(count, n);
|
||||
} else {
|
||||
AMS_ASSERT(n == 1);
|
||||
}
|
||||
|
||||
const s32 csize = TlsHeapStatic::GetChunkSize(cls) * (n - 1);
|
||||
_this->total_cached_size += csize;
|
||||
_this->cached_size[cls] += csize;
|
||||
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
|
||||
_this->largest_class = cls;
|
||||
}
|
||||
}
|
||||
|
||||
/* Demangle our pointer, update free list. */
|
||||
ptr = _this->ManglePointer(ptr);
|
||||
_this->small_mem_lists[cls] = *reinterpret_cast<void **>(ptr);
|
||||
|
||||
return ptr;
|
||||
} else {
|
||||
/* If allocating a huge size, release our cache. */
|
||||
if (size >= _this->total_heap_size / 4) {
|
||||
_this->ReleaseAllCache();
|
||||
}
|
||||
return _this->central->CacheLargeMemory(size);
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::FreeImpl<false>(TlsHeapCache *_this, void *ptr) {
|
||||
const size_t cls = _this->central->GetClassFromPointer(ptr);
|
||||
if (cls == 0) {
|
||||
return _this->central->UncacheLargeMemory(ptr);
|
||||
}
|
||||
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
|
||||
if (static_cast<s32>(cls) >= 0) {
|
||||
return _this->central->UncacheSmallMemory(ptr);
|
||||
} else if (ptr == nullptr) {
|
||||
return 0;
|
||||
} else {
|
||||
return EFAULT;
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::FreeImpl<true>(TlsHeapCache *_this, void *ptr) {
|
||||
const size_t cls = _this->central->GetClassFromPointer(ptr);
|
||||
if (cls == 0) {
|
||||
return _this->central->UncacheLargeMemory(ptr);
|
||||
}
|
||||
|
||||
AMS_ASSERT(cls < TlsHeapStatic::NumClassInfo);
|
||||
|
||||
if (static_cast<s32>(cls) >= 0) {
|
||||
*reinterpret_cast<void **>(ptr) = _this->small_mem_lists[cls];
|
||||
_this->small_mem_lists[cls] = _this->ManglePointer(ptr);
|
||||
|
||||
const s32 csize = TlsHeapStatic::GetChunkSize(cls);
|
||||
_this->total_cached_size += csize;
|
||||
_this->cached_size[cls] += csize;
|
||||
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
|
||||
_this->largest_class = cls;
|
||||
}
|
||||
|
||||
errno_t err = 0;
|
||||
if (!_this->central->CheckCachedSize(_this->total_cached_size)) {
|
||||
_this->central->UncacheSmallMemoryList(_this, _this->small_mem_lists[_this->largest_class]);
|
||||
_this->small_mem_lists[_this->largest_class] = nullptr;
|
||||
_this->total_cached_size -= _this->cached_size[_this->largest_class];
|
||||
_this->cached_size[_this->largest_class] = 0;
|
||||
|
||||
s32 largest_class = 0;
|
||||
s32 biggest_size = -1;
|
||||
for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
|
||||
if (biggest_size < _this->cached_size[i]) {
|
||||
biggest_size = _this->cached_size[i];
|
||||
largest_class = static_cast<s32>(i);
|
||||
}
|
||||
}
|
||||
_this->largest_class = largest_class;
|
||||
}
|
||||
return err;
|
||||
} else if (ptr == nullptr) {
|
||||
return 0;
|
||||
} else {
|
||||
return EFAULT;
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::FreeWithSizeImpl<false>(TlsHeapCache *_this, void *ptr, size_t size) {
|
||||
if (ptr == nullptr) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const size_t cls = TlsHeapStatic::GetClassFromSize(size);
|
||||
if (cls == 0) {
|
||||
return _this->central->UncacheLargeMemory(ptr);
|
||||
} else {
|
||||
return _this->central->UncacheSmallMemory(ptr);
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::FreeWithSizeImpl<true>(TlsHeapCache *_this, void *ptr, size_t size) {
|
||||
if (ptr == nullptr) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const size_t cls = TlsHeapStatic::GetClassFromSize(size);
|
||||
if (cls == 0) {
|
||||
return _this->central->UncacheLargeMemory(ptr);
|
||||
} else {
|
||||
*reinterpret_cast<void **>(ptr) = _this->small_mem_lists[cls];
|
||||
_this->small_mem_lists[cls] = _this->ManglePointer(ptr);
|
||||
|
||||
const s32 csize = TlsHeapStatic::GetChunkSize(cls);
|
||||
_this->total_cached_size += csize;
|
||||
_this->cached_size[cls] += csize;
|
||||
if (_this->cached_size[cls] > _this->cached_size[_this->largest_class]) {
|
||||
_this->largest_class = cls;
|
||||
}
|
||||
|
||||
errno_t err = 0;
|
||||
if (!_this->central->CheckCachedSize(_this->total_cached_size)) {
|
||||
_this->central->UncacheSmallMemoryList(_this, _this->small_mem_lists[_this->largest_class]);
|
||||
_this->small_mem_lists[_this->largest_class] = nullptr;
|
||||
_this->total_cached_size -= _this->cached_size[_this->largest_class];
|
||||
_this->cached_size[_this->largest_class] = 0;
|
||||
|
||||
s32 largest_class = 0;
|
||||
s32 biggest_size = -1;
|
||||
for (size_t i = 0; i < TlsHeapStatic::NumClassInfo; i++) {
|
||||
if (biggest_size < _this->cached_size[i]) {
|
||||
biggest_size = _this->cached_size[i];
|
||||
largest_class = static_cast<s32>(i);
|
||||
}
|
||||
}
|
||||
_this->largest_class = largest_class;
|
||||
}
|
||||
return err;
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
size_t TlsHeapCache::GetAllocationSizeImpl<false>(TlsHeapCache *_this, const void *ptr) {
|
||||
return _this->GetAllocationSizeCommonImpl(ptr);
|
||||
}
|
||||
|
||||
template<>
|
||||
size_t TlsHeapCache::GetAllocationSizeImpl<true>(TlsHeapCache *_this, const void *ptr) {
|
||||
return _this->GetAllocationSizeCommonImpl(ptr);
|
||||
}
|
||||
|
||||
size_t TlsHeapCache::GetAllocationSizeCommonImpl(const void *ptr) const {
|
||||
const s32 cls = this->central->GetClassFromPointer(ptr);
|
||||
if (cls > 0) {
|
||||
if (!util::IsAligned(ptr, alignof(u64))) {
|
||||
/* All pointers we allocate have alignment at least 8. */
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Validate class. */
|
||||
AMS_ASSERT(cls < static_cast<s32>(TlsHeapStatic::NumClassInfo));
|
||||
if (cls < 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return TlsHeapStatic::GetChunkSize(cls);
|
||||
} else if (ptr != nullptr) {
|
||||
return this->central->GetAllocationSize(ptr);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::ReallocateImpl<false>(TlsHeapCache *_this, void *ptr, size_t size, void **p) {
|
||||
AMS_ASSERT(ptr != nullptr && size != 0);
|
||||
if (size > MaxSize) {
|
||||
return ENOMEM;
|
||||
}
|
||||
|
||||
size_t alloc_size, copy_size;
|
||||
|
||||
const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
|
||||
const s32 cls_from_ptr = _this->central->GetClassFromPointer(ptr);
|
||||
if (cls_from_ptr < 0) {
|
||||
/* error case. */
|
||||
return EFAULT;
|
||||
} else if (cls_from_size) {
|
||||
if (cls_from_ptr > 0) {
|
||||
if (cls_from_size <= cls_from_ptr) {
|
||||
*p = ptr;
|
||||
return 0;
|
||||
} else {
|
||||
alloc_size = size;
|
||||
copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
|
||||
}
|
||||
} else /* if (cls_from_ptr == 0) */ {
|
||||
alloc_size = size;
|
||||
copy_size = size;
|
||||
}
|
||||
} else if (cls_from_ptr == 0) {
|
||||
return _this->central->ReallocateLargeMemory(ptr, size, p);
|
||||
} else /* if (cls_from_ptr > 0) */ {
|
||||
alloc_size = size;
|
||||
copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
|
||||
}
|
||||
|
||||
*p = AllocateImpl<false>(_this, alloc_size);
|
||||
if (*p == nullptr) {
|
||||
return ENOMEM;
|
||||
}
|
||||
std::memcpy(*p, ptr, copy_size);
|
||||
return FreeImpl<false>(_this, ptr);
|
||||
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::ReallocateImpl<true>(TlsHeapCache *_this, void *ptr, size_t size, void **p) {
|
||||
AMS_ASSERT(ptr != nullptr && size != 0);
|
||||
if (size > MaxSize) {
|
||||
return ENOMEM;
|
||||
}
|
||||
|
||||
size_t alloc_size, copy_size;
|
||||
|
||||
const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
|
||||
const s32 cls_from_ptr = _this->central->GetClassFromPointer(ptr);
|
||||
if (cls_from_ptr < 0) {
|
||||
/* error case. */
|
||||
return EFAULT;
|
||||
} else if (cls_from_size) {
|
||||
if (cls_from_ptr > 0) {
|
||||
if (cls_from_size <= cls_from_ptr) {
|
||||
*p = ptr;
|
||||
return 0;
|
||||
} else {
|
||||
alloc_size = size;
|
||||
copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
|
||||
}
|
||||
} else /* if (cls_from_ptr == 0) */ {
|
||||
alloc_size = size;
|
||||
copy_size = size;
|
||||
}
|
||||
} else if (cls_from_ptr == 0) {
|
||||
return _this->central->ReallocateLargeMemory(ptr, size, p);
|
||||
} else /* if (cls_from_ptr > 0) */ {
|
||||
alloc_size = size;
|
||||
copy_size = TlsHeapStatic::GetChunkSize(cls_from_ptr);
|
||||
}
|
||||
|
||||
*p = AllocateImpl<true>(_this, alloc_size);
|
||||
if (*p == nullptr) {
|
||||
return ENOMEM;
|
||||
}
|
||||
std::memcpy(*p, ptr, copy_size);
|
||||
return FreeImpl<true>(_this, ptr);
|
||||
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::ShrinkImpl<false>(TlsHeapCache *_this, void *ptr, size_t size) {
|
||||
return _this->ShrinkCommonImpl(ptr, size);
|
||||
}
|
||||
|
||||
template<>
|
||||
errno_t TlsHeapCache::ShrinkImpl<true>(TlsHeapCache *_this, void *ptr, size_t size) {
|
||||
return _this->ShrinkCommonImpl(ptr, size);
|
||||
}
|
||||
|
||||
errno_t TlsHeapCache::ShrinkCommonImpl(void *ptr, size_t size) const {
|
||||
AMS_ASSERT(ptr != nullptr && size != 0);
|
||||
if (size > MaxSize) {
|
||||
return ENOMEM;
|
||||
}
|
||||
|
||||
const s32 cls_from_size = TlsHeapStatic::GetClassFromSize(size);
|
||||
const s32 cls_from_ptr = this->central->GetClassFromPointer(ptr);
|
||||
if (cls_from_ptr) {
|
||||
if (cls_from_ptr <= 0) {
|
||||
return EFAULT;
|
||||
} else if (cls_from_size && cls_from_size <= cls_from_ptr) {
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
} else if (cls_from_size) {
|
||||
return this->central->ShrinkLargeMemory(ptr, TlsHeapStatic::PageSize);
|
||||
} else {
|
||||
return this->central->ShrinkLargeMemory(ptr, size);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
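The cached Allocate/Free paths above keep freed chunks on per-class intrusive free lists whose head and embedded next links are XOR-mangled with a per-cache random value, so a corrupted link decodes to a garbage address rather than an attacker-chosen one. A standalone sketch of that list discipline; MangledFreeList and the fixed secret are illustrative, not the TlsHeapCache layout:

    #include <cstdint>

    // Sketch only: the head and every stored next pointer are kept XOR'd with a secret, and
    // the first word of each freed chunk holds the (mangled) previous head.
    namespace sketch {

        class MangledFreeList {
            private:
                uintptr_t mangle_val;
                void *head = nullptr; /* stored mangled; nullptr means empty */
            public:
                explicit MangledFreeList(uintptr_t secret) : mangle_val(secret) { /* ... */ }

                void *Mangle(void *ptr) const {
                    return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(ptr) ^ this->mangle_val);
                }

                void Push(void *chunk) {
                    /* Store the current (already mangled) head inside the freed chunk itself. */
                    *reinterpret_cast<void **>(chunk) = this->head;
                    this->head = this->Mangle(chunk);
                }

                void *Pop() {
                    if (this->head == nullptr) {
                        return nullptr;
                    }
                    void *chunk = this->Mangle(this->head);        /* demangle to recover the chunk */
                    this->head = *reinterpret_cast<void **>(chunk); /* next link is stored mangled  */
                    return chunk;
                }
        };

    }

    // Usage sketch: push two chunks and pop them back in LIFO order.
    //   alignas(16) unsigned char a[16], b[16];
    //   sketch::MangledFreeList fl(0x5a5a5a5au); fl.Push(a); fl.Push(b); /* Pop() yields b, then a */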
@@ -0,0 +1,102 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
class TlsHeapCentral;
|
||||
|
||||
#define FOREACH_TLS_HEAP_CACHE_FUNC(HANDLER) \
|
||||
HANDLER(void *, Allocate, allocate, size_t size); \
|
||||
HANDLER(void *, AllocateAligned, allocate_aligned, size_t size, size_t align); \
|
||||
HANDLER(errno_t, Free, free, void *ptr); \
|
||||
HANDLER(errno_t, FreeWithSize, free_with_size, void *ptr, size_t size); \
|
||||
HANDLER(size_t, GetAllocationSize, get_allocation_size, const void *ptr); \
|
||||
HANDLER(errno_t, Reallocate, reallocate, void *ptr, size_t size, void **p); \
|
||||
HANDLER(errno_t, Shrink, shrink, void *ptr, size_t size);
|
||||
|
||||
class TlsHeapCache {
|
||||
public:
|
||||
static constexpr size_t MaxChunkCount = BITSIZEOF(u64);
|
||||
public:
|
||||
#define TLS_HEAP_CACHE_DECLARE_TYPEDEF(RETURN, NAME, MEMBER_NAME, ...) \
|
||||
using NAME##Func = RETURN (*)(TlsHeapCache *, ## __VA_ARGS__)
|
||||
|
||||
FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_TYPEDEF)
|
||||
|
||||
#undef TLS_HEAP_CACHE_DECLARE_TYPEDEF
|
||||
private:
|
||||
#define TLS_HEAP_CACHE_DECLARE_MEMBER(RETURN, NAME, MEMBER_NAME, ...) \
|
||||
NAME##Func MEMBER_NAME;
|
||||
|
||||
FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_MEMBER)
|
||||
|
||||
#undef TLS_HEAP_CACHE_DECLARE_MEMBER
|
||||
|
||||
uintptr_t mangle_val;
|
||||
TlsHeapCentral *central;
|
||||
size_t total_heap_size;
|
||||
u32 heap_option;
|
||||
s32 total_cached_size;
|
||||
s32 largest_class;
|
||||
void *small_mem_lists[TlsHeapStatic::NumClassInfo];
|
||||
s32 cached_size[TlsHeapStatic::NumClassInfo];
|
||||
u8 chunk_count[TlsHeapStatic::NumClassInfo];
|
||||
public:
|
||||
TlsHeapCache(TlsHeapCentral *central, u32 option);
|
||||
void Finalize();
|
||||
|
||||
void *ManglePointer(void *ptr) const {
|
||||
return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(ptr) ^ this->mangle_val);
|
||||
}
|
||||
|
||||
bool CheckCache() const;
|
||||
void ReleaseAllCache();
|
||||
|
||||
public:
|
||||
/* TODO: Better handler with type info to macro this? */
|
||||
ALWAYS_INLINE void *Allocate(size_t size) { return this->allocate(this, size); }
|
||||
ALWAYS_INLINE void *Allocate(size_t size, size_t align) { return this->allocate_aligned(this, size, align); }
|
||||
ALWAYS_INLINE errno_t Free(void *ptr) { return this->free(this, ptr); }
|
||||
ALWAYS_INLINE errno_t FreeWithSize(void *ptr, size_t size) { return this->free_with_size(this, ptr, size); }
|
||||
ALWAYS_INLINE size_t GetAllocationSize(const void *ptr) { return this->get_allocation_size(this, ptr); }
|
||||
ALWAYS_INLINE errno_t Reallocate(void *ptr, size_t size, void **p) { return this->reallocate(this, ptr, size, p); }
|
||||
ALWAYS_INLINE errno_t Shrink(void *ptr, size_t size) { return this->shrink(this, ptr, size); }
|
||||
private:
|
||||
#define TLS_HEAP_CACHE_DECLARE_TEMPLATE(RETURN, NAME, MEMBER_NAME, ...) \
|
||||
template<bool Cache> static RETURN NAME##Impl(TlsHeapCache *_this, ## __VA_ARGS__ )
|
||||
|
||||
FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_TEMPLATE)
|
||||
|
||||
#undef TLS_HEAP_CACHE_DECLARE_TEMPLATE
|
||||
|
||||
size_t GetAllocationSizeCommonImpl(const void *ptr) const;
|
||||
errno_t ShrinkCommonImpl(void *ptr, size_t size) const;
|
||||
};
|
||||
|
||||
#define TLS_HEAP_CACHE_DECLARE_INSTANTIATION(RETURN, NAME, MEMBER_NAME, ...) \
|
||||
template<> RETURN TlsHeapCache::NAME##Impl<false>(TlsHeapCache *_this, ##__VA_ARGS__); \
|
||||
template<> RETURN TlsHeapCache::NAME##Impl<true>(TlsHeapCache *_this, ##__VA_ARGS__)
|
||||
|
||||
FOREACH_TLS_HEAP_CACHE_FUNC(TLS_HEAP_CACHE_DECLARE_INSTANTIATION)
|
||||
|
||||
#undef FOREACH_TLS_HEAP_CACHE_FUNC
|
||||
|
||||
|
||||
}
|
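The FOREACH_TLS_HEAP_CACHE_FUNC X-macro above stamps out, per operation, a function-pointer typedef, a member holding that pointer, and a templated implementation declaration; the constructor then binds each member to the <true> or <false> instantiation exactly once, so per-call dispatch is a single indirect call with no branch on the heap option. A standalone sketch of the same technique with illustrative names (one operation, no macro):

    #include <cstddef>
    #include <new>

    // Sketch only: Dispatcher stands in for TlsHeapCache; the caller owns the returned
    // storage and releases it with ::operator delete.
    namespace sketch {

        class Dispatcher {
            public:
                using AllocateFunc = void *(*)(Dispatcher *, size_t);
            private:
                AllocateFunc allocate;
                size_t cached_bytes = 0;
            private:
                template<bool Cache>
                static void *AllocateImpl(Dispatcher *_this, size_t size) {
                    if constexpr (Cache) {
                        _this->cached_bytes += size; /* stand-in for the thread-local cache bookkeeping */
                    }
                    return ::operator new(size, std::nothrow);
                }
            public:
                explicit Dispatcher(bool enable_cache) {
                    /* Bind the implementation once, based on the option. */
                    this->allocate = enable_cache ? AllocateImpl<true> : AllocateImpl<false>;
                }

                void *Allocate(size_t size) { return this->allocate(this, size); }
        };

    }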
File diff suppressed because it is too large
|
@@ -0,0 +1,547 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
#include "mem_impl_heap_tls_heap_static.hpp"
|
||||
#include "mem_impl_heap_tls_heap_cache.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
/* Simple intrusive list. */
|
||||
template<typename T>
|
||||
struct ListHeader {
|
||||
T *list_next;
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
struct ListElement : public ListHeader<T> {
|
||||
T *list_prev;
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
constexpr inline void ListClearLink(ListHeader<T> *l) {
|
||||
l->list_next = nullptr;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline void ListClearLink(ListElement<T> *l) {
|
||||
l->list_next = nullptr;
|
||||
l->list_prev = nullptr;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline T *ListGetNext(const ListHeader<T> *l) {
|
||||
return l->list_next;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline T *ListGetNext(const ListElement<T> *l) {
|
||||
return l->list_next;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline T *ListGetPrev(const ListElement<T> *l) {
|
||||
return l->list_prev;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline void ListInsertAfter(ListHeader<T> *hdr, T *e) {
|
||||
e->list_next = hdr->list_next;
|
||||
e->list_prev = static_cast<T *>(hdr);
|
||||
|
||||
if (hdr->list_next != nullptr) {
|
||||
hdr->list_next->list_prev = e;
|
||||
}
|
||||
hdr->list_next = e;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
constexpr inline void ListRemoveSelf(T *e) {
|
||||
if (e->list_next != nullptr) {
|
||||
e->list_next->list_prev = e->list_prev;
|
||||
}
|
||||
if (e->list_prev != nullptr) {
|
||||
e->list_prev->list_next = e->list_next;
|
||||
}
|
||||
e->list_next = nullptr;
|
||||
e->list_prev = nullptr;
|
||||
}
|
||||
|
||||
struct Span : public ListElement<Span> {
|
||||
struct SmallMemory {
|
||||
SmallMemory *next;
|
||||
};
|
||||
|
||||
enum Status : u8 {
|
||||
Status_NotUsed = 0,
|
||||
Status_InUse = 1,
|
||||
Status_InFreeList = 2,
|
||||
Status_InUseSystem = 3,
|
||||
};
|
||||
|
||||
u16 object_count;
|
||||
u8 page_class;
|
||||
u8 status;
|
||||
s32 id;
|
||||
union {
|
||||
uintptr_t u;
|
||||
void *p;
|
||||
SmallMemory *sm;
|
||||
char *cp;
|
||||
} start;
|
||||
uintptr_t num_pages;
|
||||
union {
|
||||
struct {
|
||||
SmallMemory *objects;
|
||||
u64 is_allocated[8];
|
||||
} small;
|
||||
struct {
|
||||
u8 color[3];
|
||||
char name[0x10];
|
||||
} large;
|
||||
struct {
|
||||
u32 zero;
|
||||
} large_clear;
|
||||
} aux;
|
||||
};
|
||||
|
||||
struct SpanPage : public ListElement<SpanPage> {
|
||||
struct Info {
|
||||
u64 alloc_bitmap;
|
||||
u16 free_count;
|
||||
u8 is_sticky;
|
||||
Span span_of_spanpage;
|
||||
} info;
|
||||
Span spans[(TlsHeapStatic::PageSize - sizeof(Info) - sizeof(ListElement<SpanPage>)) / sizeof(Span)];
|
||||
|
||||
static constexpr size_t MaxSpanCount = sizeof(spans) / sizeof(spans[0]);
|
||||
};
|
||||
static_assert(sizeof(SpanPage) <= TlsHeapStatic::PageSize);
|
||||
|
||||
static constexpr ALWAYS_INLINE bool CanAllocateSpan(const SpanPage *span_page) {
|
||||
return span_page->info.alloc_bitmap != ~(decltype(span_page->info.alloc_bitmap){});
|
||||
}
|
||||
|
||||
struct SpanTable {
|
||||
uintptr_t total_pages;
|
||||
Span **page_to_span;
|
||||
u8 *pageclass_cache;
|
||||
};
|
||||
|
||||
struct TlsHeapMemStats {
|
||||
size_t allocated_size;
|
||||
size_t free_size;
|
||||
size_t system_size;
|
||||
size_t max_allocatable_size;
|
||||
};
|
||||
|
||||
ALWAYS_INLINE Span *GetSpanFromPointer(const SpanTable *table, const void *ptr) {
|
||||
const size_t idx = TlsHeapStatic::GetPageIndex(reinterpret_cast<uintptr_t>(ptr) - reinterpret_cast<uintptr_t>(table));
|
||||
if (idx < table->total_pages) {
|
||||
return table->page_to_span[idx];
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
ALWAYS_INLINE SpanPage *GetSpanPage(Span *span) {
|
||||
return reinterpret_cast<SpanPage *>(TlsHeapStatic::AlignDownPage(reinterpret_cast<uintptr_t>(span)));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Span *GetSpanPageSpan(SpanPage *span_page) {
|
||||
return std::addressof(span_page->info.span_of_spanpage);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Span *GetPrevSpan(const SpanTable *span_table, const Span *span) {
|
||||
return GetSpanFromPointer(span_table, reinterpret_cast<const void *>(span->start.u - 1));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE Span *GetNextSpan(const SpanTable *span_table, const Span *span) {
|
||||
return GetSpanFromPointer(span_table, reinterpret_cast<const void *>(span->start.u + span->num_pages * TlsHeapStatic::PageSize));
|
||||
}
|
||||
|
||||
class TlsHeapCentral {
|
||||
private:
|
||||
using FreeListAvailableWord = u64;
|
||||
|
||||
static constexpr size_t FreeListCount = 0x100;
|
||||
static constexpr size_t NumFreeListBitmaps = FreeListCount / BITSIZEOF(FreeListAvailableWord);
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t FreeListAvailableIndex(size_t which) {
|
||||
return which / BITSIZEOF(FreeListAvailableWord);
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t FreeListAvailableBit(size_t which) {
|
||||
return which % BITSIZEOF(FreeListAvailableWord);
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE FreeListAvailableWord FreeListAvailableMask(size_t which) {
|
||||
return static_cast<FreeListAvailableWord>(1) << FreeListAvailableBit(which);
|
||||
}
|
||||
|
||||
static_assert(NumFreeListBitmaps * BITSIZEOF(FreeListAvailableWord) == FreeListCount);
|
||||
private:
|
||||
SpanTable span_table;
|
||||
u8 *physical_page_flags;
|
||||
s32 num_threads;
|
||||
s32 static_thread_quota;
|
||||
s32 dynamic_thread_quota;
|
||||
bool use_virtual_memory;
|
||||
os::RecursiveMutex lock;
|
||||
ListHeader<SpanPage> spanpage_list;
|
||||
ListHeader<SpanPage> full_spanpage_list;
|
||||
ListHeader<Span> freelists[FreeListCount];
|
||||
FreeListAvailableWord freelists_bitmap[NumFreeListBitmaps];
|
||||
ListHeader<Span> smallmem_lists[TlsHeapStatic::NumClassInfo];
|
||||
public:
|
||||
TlsHeapCentral() {
|
||||
this->span_table.total_pages = 0;
|
||||
}
|
||||
|
||||
errno_t Initialize(void *start, size_t size, bool use_virtual_memory);
|
||||
bool IsClean();
|
||||
|
||||
errno_t ReallocateLargeMemory(void *ptr, size_t size, void **p);
|
||||
errno_t ShrinkLargeMemory(void *ptr, size_t size);
|
||||
|
||||
void CalculateHeapHash(HeapHash *out);
|
||||
|
||||
errno_t AddThreadCache(TlsHeapCache *cache) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
/* Add thread and recalculate. */
|
||||
this->num_threads++;
|
||||
this->dynamic_thread_quota = this->GetTotalHeapSize() / (2 * this->num_threads);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
errno_t RemoveThreadCache(TlsHeapCache *cache) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
/* Remove thread and recalculate. */
|
||||
this->num_threads--;
|
||||
this->dynamic_thread_quota = this->GetTotalHeapSize() / (2 * this->num_threads);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
void *CacheLargeMemory(size_t size) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
const size_t num_pages = util::AlignUp(size, TlsHeapStatic::PageSize) / TlsHeapStatic::PageSize;
|
||||
if (Span *span = this->AllocatePagesImpl(num_pages); span != nullptr) {
|
||||
return span->start.p;
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void *CacheLargeMemoryWithBigAlign(size_t size, size_t align) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
const size_t num_pages = util::AlignUp(size, TlsHeapStatic::PageSize) / TlsHeapStatic::PageSize;
|
||||
|
||||
Span *span = nullptr;
|
||||
if (align > TlsHeapStatic::PageSize) {
|
||||
span = this->AllocatePagesWithBigAlignImpl(num_pages, align);
|
||||
} else {
|
||||
span = this->AllocatePagesImpl(num_pages);
|
||||
}
|
||||
|
||||
if (span != nullptr) {
|
||||
return span->start.p;
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void *CacheSmallMemory(size_t cls, size_t align = 0) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
return this->CacheSmallMemoryImpl(cls, align, false);
|
||||
}
|
||||
|
||||
void *CacheSmallMemoryForSystem(size_t cls) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
return this->CacheSmallMemoryImpl(cls, 0, true);
|
||||
}
|
||||
|
||||
size_t CacheSmallMemoryList(TlsHeapCache *cache, size_t *cls, size_t count, void **p, size_t align = 0) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
s32 cpu_id = 0;
|
||||
if (*cls < 8) {
|
||||
getcpu(std::addressof(cpu_id));
|
||||
}
|
||||
|
||||
return this->CacheSmallMemoryListImpl(cache, cls, count, p, cpu_id, 0);
|
||||
}
|
||||
|
||||
bool CheckCachedSize(s32 size) const {
|
||||
return size < this->dynamic_thread_quota && size < this->static_thread_quota;
|
||||
}
|
||||
|
||||
void Dump(DumpMode dump_mode, int fd, bool json) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
return this->DumpImpl(dump_mode, fd, json);
|
||||
}
|
||||
|
||||
size_t GetAllocationSize(const void *ptr) {
|
||||
if (TlsHeapStatic::IsPageAligned(ptr)) {
|
||||
Span *span = nullptr;
|
||||
{
|
||||
std::scoped_lock lk(this->lock);
|
||||
span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
|
||||
}
|
||||
if (span != nullptr) {
|
||||
return span->num_pages * TlsHeapStatic::PageSize;
|
||||
} else {
|
||||
AMS_ASSERT(span != nullptr);
|
||||
return 0;
|
||||
}
|
||||
} else {
|
||||
/* TODO: Handle error? */
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
size_t GetClassFromPointer(const void *ptr) {
|
||||
std::atomic_thread_fence(std::memory_order_acquire);
|
||||
|
||||
const size_t idx = (reinterpret_cast<uintptr_t>(ptr) - reinterpret_cast<uintptr_t>(this)) / TlsHeapStatic::PageSize;
|
||||
if (idx < this->span_table.total_pages) {
|
||||
if (ptr != nullptr) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr);
|
||||
if (span != nullptr) {
|
||||
AMS_ASSERT(span->page_class == this->span_table.pageclass_cache[idx]);
|
||||
} else {
|
||||
AMS_ASSERT(span != nullptr);
|
||||
}
|
||||
}
|
||||
return this->span_table.pageclass_cache[idx];
|
||||
} else {
|
||||
/* TODO: Handle error? */
|
||||
return 0xFFFFFFFF;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t GetColor(const void *ptr, int *out) {
|
||||
if (out == nullptr) {
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
|
||||
*out = (span->aux.large.color[0] << 0) | (span->aux.large.color[1] << 8) | (span->aux.large.color[2] << 16);
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t SetColor(const void *ptr, int color) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
|
||||
span->aux.large.color[0] = (color >> 0) & 0xFF;
|
||||
span->aux.large.color[1] = (color >> 8) & 0xFF;
|
||||
span->aux.large.color[2] = (color >> 16) & 0xFF;
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t GetMappedMemStats(size_t *out_free_size, size_t *out_max_allocatable_size) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
return this->GetMappedMemStatsImpl(out_free_size, out_max_allocatable_size);
|
||||
}
|
||||
|
||||
errno_t GetMemStats(TlsHeapMemStats *out) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
return this->GetMemStatsImpl(out);
|
||||
}
|
||||
|
||||
errno_t GetName(const void *ptr, char *dst, size_t dst_size) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
|
||||
strlcpy(dst, span->aux.large.name, dst_size);
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t SetName(const void *ptr, const char *name) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr && !span->page_class) {
|
||||
strlcpy(span->aux.large.name, name, sizeof(span->aux.large.name));
|
||||
return 0;
|
||||
} else {
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
size_t GetTotalHeapSize() const {
|
||||
return this->span_table.total_pages * TlsHeapStatic::PageSize;
|
||||
}
|
||||
|
||||
errno_t UncacheLargeMemory(void *ptr) {
|
||||
if (TlsHeapStatic::IsPageAligned(ptr)) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
if (Span *span = GetSpanFromPointer(std::addressof(this->span_table), ptr); span != nullptr) {
|
||||
this->FreePagesImpl(span);
|
||||
return 0;
|
||||
} else {
|
||||
return EFAULT;
|
||||
}
|
||||
} else {
|
||||
return EFAULT;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t UncacheSmallMemory(void *ptr) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
return this->UncacheSmallMemoryImpl(ptr);
|
||||
}
|
||||
|
||||
errno_t UncacheSmallMemoryList(TlsHeapCache *cache, void *ptr) {
|
||||
std::scoped_lock lk(this->lock);
|
||||
|
||||
while (true) {
|
||||
if (ptr == nullptr) {
|
||||
return 0;
|
||||
}
|
||||
ptr = cache->ManglePointer(ptr);
|
||||
void *next = *reinterpret_cast<void **>(ptr);
|
||||
if (auto err = this->UncacheSmallMemoryImpl(ptr); err != 0) {
|
||||
return err;
|
||||
}
|
||||
ptr = next;
|
||||
}
|
||||
}
|
||||
|
||||
errno_t WalkAllocatedPointers(HeapWalkCallback callback, void *user_data) {
|
||||
/* Explicitly handle locking, as we will release the lock during callback. */
|
||||
this->lock.lock();
|
||||
ON_SCOPE_EXIT { this->lock.unlock(); };
|
||||
|
||||
return this->WalkAllocatedPointersImpl(callback, user_data);
|
||||
}
|
||||
private:
|
||||
SpanPage *AllocateSpanPage();
|
||||
Span *AllocateSpanFromSpanPage(SpanPage *sp);
|
||||
|
||||
Span *SplitSpan(Span *span, size_t num_pages, Span *new_span);
|
||||
void MergeFreeSpans(Span *span, Span *span_to_merge, uintptr_t start);
|
||||
|
||||
bool DestroySpanPageIfEmpty(SpanPage *sp, bool full);
|
||||
Span *GetFirstSpan() const;
|
||||
Span *MakeFreeSpan(size_t num_pages);
|
||||
Span *SearchFreeSpan(size_t num_pages) const;
|
||||
|
||||
void FreeSpanToSpanPage(Span *span, SpanPage *sp);
|
||||
void FreeSpanToSpanPage(Span *span);
|
||||
|
||||
void MergeIntoFreeList(Span *&span);
|
||||
|
||||
errno_t AllocatePhysical(void *start, size_t size);
|
||||
errno_t FreePhysical(void *start, size_t size);
|
||||
private:
|
||||
Span *AllocatePagesImpl(size_t num_pages);
|
||||
Span *AllocatePagesWithBigAlignImpl(size_t num_pages, size_t align);
|
||||
void FreePagesImpl(Span *span);
|
||||
|
||||
void *CacheSmallMemoryImpl(size_t cls, size_t align, bool for_system);
|
||||
errno_t UncacheSmallMemoryImpl(void *ptr);
|
||||
|
||||
size_t CacheSmallMemoryListImpl(TlsHeapCache *cache, size_t *cls, size_t count, void **p, s32 cpu_id, size_t align);
|
||||
|
||||
errno_t WalkAllocatedPointersImpl(HeapWalkCallback callback, void *user_data);
|
||||
|
||||
errno_t GetMappedMemStatsImpl(size_t *out_free_size, size_t *out_max_allocatable_size);
|
||||
errno_t GetMemStatsImpl(TlsHeapMemStats *out);
|
||||
|
||||
void DumpImpl(DumpMode dump_mode, int fd, bool json);
|
||||
private:
|
||||
size_t FreeListFirstNonEmpty(size_t start) const {
|
||||
if (start < FreeListCount) {
|
||||
for (size_t i = FreeListAvailableIndex(start); i < util::size(this->freelists_bitmap); i++) {
|
||||
const FreeListAvailableWord masked = this->freelists_bitmap[i] & ~(FreeListAvailableMask(start) - 1);
|
||||
if (masked) {
|
||||
const size_t b = __builtin_ctzll(masked);
|
||||
const size_t res = i * BITSIZEOF(FreeListAvailableWord) + b;
|
||||
AMS_ASSERT(res < FreeListCount);
|
||||
return res;
|
||||
}
|
||||
start = (i + 1) * BITSIZEOF(FreeListAvailableWord);
|
||||
}
|
||||
}
|
||||
return FreeListCount;
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void AddToFreeBlockList(Span *span) {
|
||||
AMS_ASSERT(GetSpanPageSpan(GetSpanPage(span)) != span);
|
||||
AMS_ASSERT(span->status == Span::Status_InFreeList);
|
||||
const size_t which = std::min(span->num_pages, FreeListCount) - 1;
|
||||
ListInsertAfter(std::addressof(this->freelists[which]), span);
|
||||
this->freelists_bitmap[FreeListAvailableIndex(which)] |= FreeListAvailableMask(which);
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void RemoveFromFreeBlockList(Span *span) {
|
||||
const size_t which = std::min(span->num_pages, FreeListCount) - 1;
|
||||
ListRemoveSelf(span);
|
||||
if (!ListGetNext(std::addressof(this->freelists[which]))) {
|
||||
this->freelists_bitmap[FreeListAvailableIndex(which)] &= ~FreeListAvailableMask(which);
|
||||
}
|
||||
}
|
||||
|
||||
Span *AllocateSpanStruct() {
|
||||
SpanPage *sp = ListGetNext(std::addressof(this->spanpage_list));
|
||||
while (sp && (sp->info.is_sticky || !CanAllocateSpan(sp))) {
|
||||
sp = ListGetNext(sp);
|
||||
}
|
||||
|
||||
if (sp == nullptr) {
|
||||
sp = this->AllocateSpanPage();
|
||||
}
|
||||
|
||||
if (sp != nullptr) {
|
||||
return this->AllocateSpanFromSpanPage(sp);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
s32 CallWalkCallback(HeapWalkCallback callback, void *ptr, size_t size, void *user_data) {
|
||||
this->lock.unlock();
|
||||
int res = callback(ptr, size, user_data);
|
||||
this->lock.lock();
|
||||
if (res) {
|
||||
return 0;
|
||||
} else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,210 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#pragma once
|
||||
#include <stratosphere.hpp>
|
||||
#include "mem_impl_heap_platform.hpp"
|
||||
|
||||
namespace ams::mem::impl::heap {
|
||||
|
||||
class TlsHeapStatic {
|
||||
public:
|
||||
struct ClassInfo {
|
||||
u16 num_pages;
|
||||
u16 chunk_size;
|
||||
};
|
||||
|
||||
static constexpr size_t NumClassInfo = 57;
|
||||
|
||||
static constexpr size_t MaxSizeWithClass = 0xC00;
|
||||
static constexpr size_t ChunkGranularity = 0x10;
|
||||
static constexpr size_t PageSize = 4_KB;
|
||||
static constexpr size_t PhysicalPageSize = 256_KB;
|
||||
public:
|
||||
static constexpr inline std::array<ClassInfo, NumClassInfo> ClassInfos = {
|
||||
ClassInfo{ .num_pages = 0, .chunk_size = 0x000, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x010, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x020, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x030, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x040, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x050, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x060, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x070, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x080, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x090, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x0A0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x0B0, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x0C0, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x0D0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x0E0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x0F0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x100, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x110, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x120, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x130, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x140, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x150, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x160, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x170, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x180, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x190, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x1A0, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x1B0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x1C0, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x1D0, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x1E0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x200, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x210, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x220, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x240, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x260, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x270, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x280, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x2A0, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x2D0, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x2E0, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x300, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x330, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x360, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x380, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x3B0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x400, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x450, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x490, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x4C0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x550, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x600, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0x660, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x6D0, },
|
||||
ClassInfo{ .num_pages = 1, .chunk_size = 0x800, },
|
||||
ClassInfo{ .num_pages = 3, .chunk_size = 0x990, },
|
||||
ClassInfo{ .num_pages = 2, .chunk_size = 0xAA0, },
|
||||
};
|
||||
|
||||
static constexpr inline std::array<size_t, MaxSizeWithClass / ChunkGranularity> SizeToClass = [] {
|
||||
std::array<size_t, MaxSizeWithClass / ChunkGranularity> arr = {};
|
||||
arr[0] = 1;
|
||||
for (size_t i = 1; i < arr.size(); i++) {
|
||||
const size_t cur_size = i * ChunkGranularity;
|
||||
for (size_t j = 0; j < ClassInfos.size(); j++) {
|
||||
if (ClassInfos[j].chunk_size >= cur_size) {
|
||||
arr[i] = j;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return arr;
|
||||
}();
|
||||
public:
|
||||
static constexpr ALWAYS_INLINE size_t GetClassFromSize(size_t size) {
|
||||
AMS_ASSERT(size <= MaxSize);
|
||||
const size_t idx = util::AlignUp(size, ChunkGranularity) / ChunkGranularity;
|
||||
if (idx < MaxSizeWithClass / ChunkGranularity) {
|
||||
return SizeToClass[idx];
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t GetRealSizeFromSizeAndAlignment(size_t size, size_t align) {
|
||||
AMS_ASSERT(size <= MaxSize);
|
||||
const size_t idx = util::AlignUp(size, ChunkGranularity) / ChunkGranularity;
|
||||
if (size == 0 || idx >= MaxSizeWithClass / ChunkGranularity) {
|
||||
return size;
|
||||
}
|
||||
const auto cls = SizeToClass[idx];
|
||||
if (!cls) {
|
||||
return PageSize;
|
||||
}
|
||||
AMS_ASSERT(align != 0);
|
||||
const size_t mask = align - 1;
|
||||
for (auto i = cls; i < ClassInfos.size(); i++) {
|
||||
if ((ClassInfos[i].chunk_size & mask) == 0) {
|
||||
return ClassInfos[i].chunk_size;
|
||||
}
|
||||
}
|
||||
return PageSize;
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE bool IsPageAligned(uintptr_t ptr) {
|
||||
return util::IsAligned(ptr, PageSize);
|
||||
}
|
||||
|
||||
static ALWAYS_INLINE bool IsPageAligned(const void *ptr) {
|
||||
return IsPageAligned(reinterpret_cast<uintptr_t>(ptr));
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t GetPageIndex(uintptr_t ptr) {
|
||||
return ptr / PageSize;
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t GetPhysicalPageIndex(uintptr_t ptr) {
|
||||
return ptr / PhysicalPageSize;
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE uintptr_t AlignUpPage(uintptr_t ptr) {
|
||||
return util::AlignUp(ptr, PageSize);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
static ALWAYS_INLINE T *AlignUpPage(T *ptr) {
|
||||
static_assert(std::is_pod<T>::value);
|
||||
static_assert(util::IsAligned(PageSize, alignof(T)));
|
||||
return reinterpret_cast<T *>(AlignUpPage(reinterpret_cast<uintptr_t>(ptr)));
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE uintptr_t AlignDownPage(uintptr_t ptr) {
|
||||
return util::AlignDown(ptr, PageSize);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
static ALWAYS_INLINE T *AlignDownPage(T *ptr) {
|
||||
static_assert(std::is_pod<T>::value);
|
||||
static_assert(util::IsAligned(PageSize, alignof(T)));
|
||||
return reinterpret_cast<T *>(AlignDownPage(reinterpret_cast<uintptr_t>(ptr)));
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE uintptr_t AlignUpPhysicalPage(uintptr_t ptr) {
|
||||
return util::AlignUp(ptr, PhysicalPageSize);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
static ALWAYS_INLINE T *AlignUpPhysicalPage(T *ptr) {
|
||||
static_assert(std::is_pod<T>::value);
|
||||
static_assert(util::IsAligned(PhysicalPageSize, alignof(T)));
|
||||
return reinterpret_cast<T *>(AlignUpPhysicalPage(reinterpret_cast<uintptr_t>(ptr)));
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE uintptr_t AlignDownPhysicalPage(uintptr_t ptr) {
|
||||
return util::AlignDown(ptr, PhysicalPageSize);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
static ALWAYS_INLINE T *AlignDownPhysicalPage(T *ptr) {
|
||||
static_assert(std::is_pod<T>::value);
|
||||
static_assert(util::IsAligned(PhysicalPageSize, alignof(T)));
|
||||
return reinterpret_cast<T *>(AlignDownPhysicalPage(reinterpret_cast<uintptr_t>(ptr)));
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t GetChunkSize(size_t cls) {
|
||||
return ClassInfos[cls].chunk_size;
|
||||
}
|
||||
|
||||
static constexpr ALWAYS_INLINE size_t GetNumPages(size_t cls) {
|
||||
return ClassInfos[cls].num_pages;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
|
@@ -0,0 +1,41 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <stratosphere.hpp>

namespace ams::mem::impl {

    enum Prot {
        Prot_none = (0 << 0),
        Prot_read = (1 << 0),
        Prot_write = (1 << 1),
        Prot_exec = (1 << 2),
    };

    errno_t virtual_alloc(void **ptr, size_t size);
    errno_t virtual_free(void *ptr, size_t size);
    errno_t physical_alloc(void *ptr, size_t size, Prot prot);
    errno_t physical_free(void *ptr, size_t size);

    size_t strlcpy(char *dst, const char *src, size_t size);

    errno_t gen_random(void *dst, size_t dst_size);

    errno_t epochtime(s64 *dst);

    errno_t getcpu(s32 *out);

}
@@ -0,0 +1,161 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <stratosphere.hpp>
#include "mem_impl_platform.hpp"

namespace ams::mem::impl {

    namespace {

        os::Mutex g_virt_mem_enabled_lock;
        bool g_virt_mem_enabled_detected;
        bool g_virt_mem_enabled;

        void EnsureVirtualAddressMemoryDetected() {
            std::scoped_lock lk(g_virt_mem_enabled_lock);
            if (AMS_LIKELY(g_virt_mem_enabled_detected)) {
                return;
            }
            g_virt_mem_enabled = os::IsVirtualAddressMemoryEnabled();
        }

        ALWAYS_INLINE bool IsVirtualAddressMemoryEnabled() {
            EnsureVirtualAddressMemoryDetected();
            return g_virt_mem_enabled;
        }

        ALWAYS_INLINE errno_t ConvertResult(Result result) {
            /* TODO: Actually implement this in a meaningful way. */
            if (R_FAILED(result)) {
                return EINVAL;
            }
            return 0;
        }

        ALWAYS_INLINE os::MemoryPermission ConvertToOsPermission(Prot prot) {
            static_assert(static_cast<int>(Prot_read) == static_cast<int>(os::MemoryPermission_ReadOnly));
            static_assert(static_cast<int>(Prot_write) == static_cast<int>(os::MemoryPermission_WriteOnly));
            return static_cast<os::MemoryPermission>(prot & os::MemoryPermission_ReadWrite);
        }

    }

    errno_t virtual_alloc(void **ptr, size_t size) {
        /* Ensure size isn't too large. */
        if (size > mem::impl::MaxSize) {
            return EINVAL;
        }

        /* Allocate virtual memory. */
        uintptr_t addr;
        if (IsVirtualAddressMemoryEnabled()) {
            /* TODO: Support virtual address memory. */
            AMS_ABORT("Virtual address memory not supported yet");
        } else {
            if (auto err = ConvertResult(os::AllocateMemoryBlock(std::addressof(addr), util::AlignUp(size, os::MemoryBlockUnitSize))); err != 0) {
                return err;
            }
            os::SetMemoryPermission(addr, size, os::MemoryPermission_None);
        }

        return 0;
    }

    errno_t virtual_free(void *ptr, size_t size) {
        /* Ensure size isn't zero. */
        if (size == 0) {
            return EINVAL;
        }

        if (IsVirtualAddressMemoryEnabled()) {
            /* TODO: Support virtual address memory. */
            AMS_ABORT("Virtual address memory not supported yet");
        } else {
            os::FreeMemoryBlock(reinterpret_cast<uintptr_t>(ptr), util::AlignUp(size, os::MemoryBlockUnitSize));
        }

        return 0;
    }

    errno_t physical_alloc(void *ptr, size_t size, Prot prot) {
        /* Detect empty allocation. */
        const uintptr_t aligned_start = util::AlignDown(reinterpret_cast<uintptr_t>(ptr), os::MemoryPageSize);
        const uintptr_t aligned_end = util::AlignUp(reinterpret_cast<uintptr_t>(ptr) + size, os::MemoryPageSize);
        const size_t aligned_size = aligned_end - aligned_start;
        if (aligned_end <= aligned_start) {
            return 0;
        }

        if (IsVirtualAddressMemoryEnabled()) {
            /* TODO: Support virtual address memory. */
            AMS_ABORT("Virtual address memory not supported yet");
        } else {
            os::SetMemoryPermission(aligned_start, aligned_size, ConvertToOsPermission(prot));
        }

        return 0;
    }

    errno_t physical_free(void *ptr, size_t size) {
        /* Detect empty allocation. */
        const uintptr_t aligned_start = util::AlignDown(reinterpret_cast<uintptr_t>(ptr), os::MemoryPageSize);
        const uintptr_t aligned_end = util::AlignUp(reinterpret_cast<uintptr_t>(ptr) + size, os::MemoryPageSize);
        const size_t aligned_size = aligned_end - aligned_start;
        if (aligned_end <= aligned_start) {
            return 0;
        }

        if (IsVirtualAddressMemoryEnabled()) {
            /* TODO: Support virtual address memory. */
            AMS_ABORT("Virtual address memory not supported yet");
        } else {
            os::SetMemoryPermission(aligned_start, aligned_size, os::MemoryPermission_None);
        }

        return 0;
    }

    size_t strlcpy(char *dst, const char *src, size_t size) {
        const size_t src_size = std::strlen(src);
        if (src_size >= size) {
            if (size) {
                std::memcpy(dst, src, size - 1);
                dst[size - 1] = 0;
            }
        } else {
            std::memcpy(dst, src, src_size + 1);
        }
        return src_size;
    }

    errno_t gen_random(void *dst, size_t dst_size) {
        os::GenerateRandomBytes(dst, dst_size);
        return 0;
    }

    errno_t epochtime(s64 *dst) {
        /* TODO: What is this calc? */
        auto ts = os::ConvertToTimeSpan(os::GetSystemTick());
        *dst = (ts.GetNanoSeconds() / INT64_C(100)) + INT64_C(0x8A09F909AE60000);
        return 0;
    }

    errno_t getcpu(s32 *out) {
        *out = os::GetCurrentCoreNumber();
        return 0;
    }

}
libraries/libstratosphere/source/mem/mem_standard_allocator.cpp (new file, 344 lines)
@ -0,0 +1,344 @@
|
|||
/*
|
||||
* Copyright (c) 2018-2020 Atmosphère-NX
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms and conditions of the GNU General Public License,
|
||||
* version 2, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#include <stratosphere.hpp>
|
||||
#include "impl/mem_impl_platform.hpp"
|
||||
#include "impl/heap/mem_impl_heap_tls_heap_static.hpp"
|
||||
#include "impl/heap/mem_impl_heap_tls_heap_cache.hpp"
|
||||
#include "impl/heap/mem_impl_heap_tls_heap_central.hpp"
|
||||
|
||||
namespace ams::mem {
|
||||
|
||||
constexpr inline size_t DefaultAlignment = alignof(std::max_align_t);
|
||||
constexpr inline size_t MinimumAllocatorSize = 16_KB;
|
||||
|
||||
namespace {
|
||||
|
||||
void ThreadDestroy(uintptr_t arg) {
|
||||
if (arg) {
|
||||
reinterpret_cast<impl::heap::TlsHeapCache *>(arg)->Finalize();
|
||||
}
|
||||
}
|
||||
|
||||
ALWAYS_INLINE impl::heap::CentralHeap *GetCentral(const impl::InternalCentralHeapStorage *storage) {
|
||||
return reinterpret_cast<impl::heap::CentralHeap *>(const_cast<impl::InternalCentralHeapStorage *>(storage));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE impl::heap::CentralHeap *GetCentral(const impl::InternalCentralHeapStorage &storage) {
|
||||
return GetCentral(std::addressof(storage));
|
||||
}
|
||||
|
||||
ALWAYS_INLINE void GetCache(impl::heap::CentralHeap *central, os::TlsSlot slot) {
|
||||
impl::heap::CachedHeap tmp_cache;
|
||||
|
||||
if (central->MakeCache(std::addressof(tmp_cache))) {
|
||||
impl::heap::TlsHeapCache *cache = tmp_cache.Release();
|
||||
os::SetTlsValue(slot, reinterpret_cast<uintptr_t>(cache));
|
||||
}
|
||||
}
|
||||
|
||||
struct InternalHash {
|
||||
size_t allocated_count;
|
||||
size_t allocated_size;
|
||||
crypto::Sha1Generator sha1;
|
||||
};
|
||||
|
||||
int InternalHashCallback(void *ptr, size_t size, void *user_data) {
|
||||
InternalHash *hash = reinterpret_cast<InternalHash *>(user_data);
|
||||
hash->sha1.Update(reinterpret_cast<void *>(std::addressof(ptr)), sizeof(ptr));
|
||||
hash->sha1.Update(reinterpret_cast<void *>(std::addressof(size)), sizeof(size));
|
||||
hash->allocated_count++;
|
||||
hash->allocated_size += size;
|
||||
return 1;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
StandardAllocator::StandardAllocator() : initialized(false), enable_thread_cache(false), unused(0) {
|
||||
static_assert(sizeof(impl::heap::CentralHeap) <= sizeof(this->central_heap_storage));
|
||||
new (std::addressof(this->central_heap_storage)) impl::heap::CentralHeap;
|
||||
}
|
||||
|
||||
StandardAllocator::StandardAllocator(void *mem, size_t size) : StandardAllocator() {
|
||||
this->Initialize(mem, size);
|
||||
}
|
||||
|
||||
StandardAllocator::StandardAllocator(void *mem, size_t size, bool enable_cache) : StandardAllocator() {
|
||||
this->Initialize(mem, size, enable_cache);
|
||||
}
|
||||
|
||||
void StandardAllocator::Initialize(void *mem, size_t size) {
|
||||
this->Initialize(mem, size, false);
|
||||
}
|
||||
|
||||
void StandardAllocator::Initialize(void *mem, size_t size, bool enable_cache) {
|
||||
AMS_ABORT_UNLESS(!this->initialized);
|
||||
|
||||
const uintptr_t aligned_start = util::AlignUp(reinterpret_cast<uintptr_t>(mem), impl::heap::TlsHeapStatic::PageSize);
|
||||
const uintptr_t aligned_end = util::AlignDown(reinterpret_cast<uintptr_t>(mem) + size, impl::heap::TlsHeapStatic::PageSize);
|
||||
const size_t aligned_size = aligned_end - aligned_start;
|
||||
|
||||
if (mem == nullptr) {
|
||||
AMS_ABORT_UNLESS(os::IsVirtualAddressMemoryEnabled());
|
||||
AMS_ABORT_UNLESS(GetCentral(this->central_heap_storage)->Initialize(nullptr, size, 0) == 0);
|
||||
} else {
|
||||
AMS_ABORT_UNLESS(aligned_start < aligned_end);
|
||||
AMS_ABORT_UNLESS(aligned_size >= MinimumAllocatorSize);
|
||||
AMS_ABORT_UNLESS(GetCentral(this->central_heap_storage)->Initialize(reinterpret_cast<void *>(aligned_start), aligned_size, 0) == 0);
|
||||
}
|
||||
|
||||
this->enable_thread_cache = enable_cache;
|
||||
if (this->enable_thread_cache) {
|
||||
R_ABORT_UNLESS(os::AllocateTlsSlot(std::addressof(this->tls_slot), ThreadDestroy));
|
||||
}
|
||||
|
||||
this->initialized = true;
|
||||
}
|
||||
|
||||
void StandardAllocator::Finalize() {
|
||||
AMS_ABORT_UNLESS(this->initialized);
|
||||
|
||||
if (this->enable_thread_cache) {
|
||||
os::FreeTlsSlot(this->tls_slot);
|
||||
}
|
||||
|
||||
GetCentral(this->central_heap_storage)->Finalize();
|
||||
|
||||
this->initialized = false;
|
||||
}
|
||||
|
||||
void *StandardAllocator::Allocate(size_t size) {
|
||||
AMS_ASSERT(this->initialized);
|
||||
return this->Allocate(size, DefaultAlignment);
|
||||
}
|
||||
|
||||
void *StandardAllocator::Allocate(size_t size, size_t alignment) {
|
||||
AMS_ASSERT(this->initialized);
|
||||
|
||||
impl::heap::TlsHeapCache *heap_cache = nullptr;
|
||||
if (this->enable_thread_cache) {
|
||||
heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
if (!heap_cache) {
|
||||
GetCache(GetCentral(this->central_heap_storage), this->tls_slot);
|
||||
heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
}
|
||||
}
|
||||
|
||||
void *ptr = nullptr;
|
||||
if (heap_cache) {
|
||||
ptr = heap_cache->Allocate(size, alignment);
|
||||
if (ptr) {
|
||||
return ptr;
|
||||
}
|
||||
|
||||
impl::heap::CachedHeap cache;
|
||||
cache.Reset(heap_cache);
|
||||
cache.Query(impl::AllocQuery_FinalizeCache);
|
||||
os::SetTlsValue(this->tls_slot, 0);
|
||||
}
|
||||
|
||||
return GetCentral(this->central_heap_storage)->Allocate(size, alignment);
|
||||
}
|
||||
|
||||
void StandardAllocator::Free(void *ptr) {
|
||||
AMS_ASSERT(this->initialized);
|
||||
|
||||
if (ptr == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this->enable_thread_cache) {
|
||||
impl::heap::TlsHeapCache *heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
if (heap_cache) {
|
||||
heap_cache->Free(ptr);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
auto err = GetCentral(this->central_heap_storage)->Free(ptr);
|
||||
AMS_ASSERT(err == 0);
|
||||
}
|
||||
|
||||
void *StandardAllocator::Reallocate(void *ptr, size_t new_size) {
|
||||
AMS_ASSERT(this->initialized);
|
||||
|
||||
if (new_size > impl::MaxSize) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (ptr == nullptr) {
|
||||
return this->Allocate(new_size);
|
||||
}
|
||||
|
||||
if (new_size == 0) {
|
||||
this->Free(ptr);
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
size_t aligned_new_size = util::AlignUp(new_size, DefaultAlignment);
|
||||
|
||||
|
||||
impl::heap::TlsHeapCache *heap_cache = nullptr;
|
||||
if (this->enable_thread_cache) {
|
||||
heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
if (!heap_cache) {
|
||||
GetCache(GetCentral(this->central_heap_storage), this->tls_slot);
|
||||
heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
}
|
||||
}
|
||||
|
||||
void *p = nullptr;
|
||||
impl::errno_t err;
|
||||
if (heap_cache) {
|
||||
err = heap_cache->Reallocate(ptr, aligned_new_size, std::addressof(p));
|
||||
} else {
|
||||
err = GetCentral(this->central_heap_storage)->Reallocate(ptr, aligned_new_size, std::addressof(p));
|
||||
}
|
||||
|
||||
if (err == 0) {
|
||||
return p;
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
size_t StandardAllocator::Shrink(void *ptr, size_t new_size) {
|
||||
AMS_ASSERT(this->initialized);
|
||||
|
||||
if (this->enable_thread_cache) {
|
||||
impl::heap::TlsHeapCache *heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
if (heap_cache) {
|
||||
if (heap_cache->Shrink(ptr, new_size) == 0) {
|
||||
return heap_cache->GetAllocationSize(ptr);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (GetCentral(this->central_heap_storage)->Shrink(ptr, new_size) == 0) {
|
||||
return GetCentral(this->central_heap_storage)->GetAllocationSize(ptr);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
void StandardAllocator::ClearThreadCache() const {
|
||||
if (this->enable_thread_cache) {
|
||||
impl::heap::TlsHeapCache *heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
impl::heap::CachedHeap cache;
|
||||
cache.Reset(heap_cache);
|
||||
cache.Query(impl::AllocQuery_ClearCache);
|
||||
cache.Release();
|
||||
}
|
||||
}
|
||||
|
||||
void StandardAllocator::CleanUpManagementArea() const {
|
||||
AMS_ASSERT(this->initialized);
|
||||
|
||||
auto err = GetCentral(this->central_heap_storage)->Query(impl::AllocQuery_UnifyFreeList);
|
||||
AMS_ASSERT(err == 0);
|
||||
}
|
||||
|
||||
size_t StandardAllocator::GetSizeOf(const void *ptr) const {
|
||||
AMS_ASSERT(this->initialized);
|
||||
|
||||
if (!util::IsAligned(reinterpret_cast<uintptr_t>(ptr), DefaultAlignment)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
impl::heap::TlsHeapCache *heap_cache = nullptr;
|
||||
if (this->enable_thread_cache) {
|
||||
heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
if (!heap_cache) {
|
||||
GetCache(GetCentral(this->central_heap_storage), this->tls_slot);
|
||||
heap_cache = reinterpret_cast<impl::heap::TlsHeapCache *>(os::GetTlsValue(this->tls_slot));
|
||||
}
|
||||
}
|
||||
|
||||
if (heap_cache) {
|
||||
return heap_cache->GetAllocationSize(ptr);
|
||||
} else {
|
||||
return GetCentral(this->central_heap_storage)->GetAllocationSize(ptr);
|
||||
}
|
||||
}
|
||||
|
||||
size_t StandardAllocator::GetTotalFreeSize() const {
|
||||
size_t size = 0;
|
||||
|
||||
auto err = GetCentral(this->central_heap_storage)->Query(impl::AllocQuery_FreeSizeMapped, std::addressof(size));
|
||||
if (err != 0) {
|
||||
err = GetCentral(this->central_heap_storage)->Query(impl::AllocQuery_FreeSize, std::addressof(size));
|
||||
}
|
||||
AMS_ASSERT(err == 0);
|
||||
|
||||
return size;
|
||||
}
|
||||
|
||||
size_t StandardAllocator::GetAllocatableSize() const {
|
||||
size_t size = 0;
|
||||
|
||||
auto err = GetCentral(this->central_heap_storage)->Query(impl::AllocQuery_MaxAllocatableSizeMapped, std::addressof(size));
|
||||
if (err != 0) {
|
||||
err = GetCentral(this->central_heap_storage)->Query(impl::AllocQuery_MaxAllocatableSize, std::addressof(size));
|
||||
}
|
||||
AMS_ASSERT(err == 0);
|
||||
|
||||
return size;
|
||||
}
|
||||
|
||||
void StandardAllocator::WalkAllocatedBlocks(WalkCallback callback, void *user_data) const {
|
||||
AMS_ASSERT(this->initialized);
|
||||
this->ClearThreadCache();
|
||||
GetCentral(this->central_heap_storage)->WalkAllocatedPointers(callback, user_data);
|
||||
}
|
||||
|
||||
void StandardAllocator::Dump() const {
|
||||
AMS_ASSERT(this->initialized);
|
||||
|
||||
size_t tmp;
|
||||
auto err = GetCentral(this->central_heap_storage)->Query(impl::AllocQuery_MaxAllocatableSizeMapped, std::addressof(tmp));
|
||||
|
||||
if (err == 0) {
|
||||
GetCentral(this->central_heap_storage)->Query(impl::AllocQuery_Dump, impl::DumpMode_Spans | impl::DumpMode_Pointers, 1);
|
||||
} else {
|
||||
GetCentral(this->central_heap_storage)->Query(impl::AllocQuery_Dump, impl::DumpMode_All, 1);
|
||||
}
|
||||
}
|
||||
|
||||
StandardAllocator::AllocatorHash StandardAllocator::Hash() const {
|
||||
AMS_ASSERT(this->initialized);
|
||||
|
||||
AllocatorHash alloc_hash;
|
||||
{
|
||||
char temp_hash[crypto::Sha1Generator::HashSize];
|
||||
InternalHash internal_hash;
|
||||
internal_hash.allocated_count = 0;
|
||||
internal_hash.allocated_size = 0;
|
||||
internal_hash.sha1.Initialize();
|
||||
|
||||
this->WalkAllocatedBlocks(InternalHashCallback, reinterpret_cast<void *>(std::addressof(internal_hash)));
|
||||
|
||||
alloc_hash.allocated_count = internal_hash.allocated_count;
|
||||
alloc_hash.allocated_size = internal_hash.allocated_size;
|
||||
|
||||
internal_hash.sha1.GetHash(temp_hash, sizeof(temp_hash));
|
||||
std::memcpy(std::addressof(alloc_hash.hash), temp_hash, sizeof(alloc_hash.hash));
|
||||
}
|
||||
return alloc_hash;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@@ -0,0 +1,23 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <stratosphere.hpp>

namespace ams::os::impl {

    void SetMemoryPermissionImpl(uintptr_t address, size_t size, MemoryPermission perm);

}
@@ -0,0 +1,55 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <stratosphere.hpp>

namespace ams::os::impl {

    namespace {

        void SetMemoryPermissionBySvc(uintptr_t start, size_t size, svc::MemoryPermission perm) {
            uintptr_t cur_address = start;
            size_t remaining_size = size;
            while (remaining_size > 0) {
                svc::MemoryInfo mem_info;
                svc::PageInfo page_info;
                R_ABORT_UNLESS(svc::QueryMemory(std::addressof(mem_info), std::addressof(page_info), cur_address));

                size_t cur_size = std::min(mem_info.addr + mem_info.size - cur_address, remaining_size);

                if (mem_info.perm != perm) {
                    R_ABORT_UNLESS(svc::SetMemoryPermission(cur_address, cur_size, perm));
                }

                cur_address += cur_size;
                remaining_size -= cur_size;
            }
        }

    }

    void SetMemoryPermissionImpl(uintptr_t address, size_t size, MemoryPermission perm) {
        switch (perm) {
            case MemoryPermission_None:
                return SetMemoryPermissionBySvc(address, size, svc::MemoryPermission_None);
            case MemoryPermission_ReadOnly:
                return SetMemoryPermissionBySvc(address, size, svc::MemoryPermission_Read);
            case MemoryPermission_ReadWrite:
                return SetMemoryPermissionBySvc(address, size, svc::MemoryPermission_ReadWrite);
            AMS_UNREACHABLE_DEFAULT_CASE();
        }
    }

}
libraries/libstratosphere/source/os/os_memory_heap.cpp (new file, 28 lines)
@@ -0,0 +1,28 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <stratosphere.hpp>

namespace ams::os {

    Result AllocateMemoryBlock(uintptr_t *out_address, size_t size) {
        AMS_ABORT("Not implemented yet");
    }

    void FreeMemoryBlock(uintptr_t address, size_t size) {
        AMS_ABORT("Not implemented yet");
    }

}
libraries/libstratosphere/source/os/os_memory_permission.cpp (new file, 25 lines)
@@ -0,0 +1,25 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <stratosphere.hpp>
#include "impl/os_memory_permission_impl.hpp"

namespace ams::os {

    void SetMemoryPermission(uintptr_t address, size_t size, MemoryPermission perm) {
        return impl::SetMemoryPermissionImpl(address, size, perm);
    }

}
@@ -0,0 +1,48 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <stratosphere.hpp>

namespace ams::os {

    /* TODO: How will this work without libnx? */

    namespace {

        using LibnxTlsDestructor = void (*)(void *);

    }

    Result AllocateTlsSlot(TlsSlot *out, TlsDestructor destructor) {
        s32 slot = ::threadTlsAlloc(reinterpret_cast<LibnxTlsDestructor>(destructor));
        R_UNLESS(slot >= 0, os::ResultOutOfResource());

        *out = { static_cast<u32>(slot) };
        return ResultSuccess();
    }

    void FreeTlsSlot(TlsSlot slot) {
        ::threadTlsFree(static_cast<s32>(slot._value));
    }

    uintptr_t GetTlsValue(TlsSlot slot) {
        return reinterpret_cast<uintptr_t>(::threadTlsGet(static_cast<s32>(slot._value)));
    }

    void SetTlsValue(TlsSlot slot, uintptr_t value) {
        ::threadTlsSet(static_cast<s32>(slot._value), reinterpret_cast<void *>(value));
    }

}
@@ -0,0 +1,38 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <stratosphere.hpp>

namespace ams::os {

    namespace {

        /* TODO: Remove, add VammManager */
        size_t GetSystemResourceSize() {
            u64 v;
            if (R_SUCCEEDED(svcGetInfo(std::addressof(v), InfoType_SystemResourceSizeTotal, CUR_PROCESS_HANDLE, 0))) {
                return v;
            } else {
                return 0;
            }
        }

    }

    bool IsVirtualAddressMemoryEnabled() {
        return GetSystemResourceSize() > 0;
    }

}
@@ -46,11 +46,11 @@ namespace ams::diag
 #endif

 #ifdef AMS_ENABLE_ASSERTIONS
-#define AMS_ASSERT_IMPL(expr, ...) \
-    ({ \
-        if (const bool __tmp_ams_assert_val = (expr); AMS_UNLIKELY(!__tmp_ams_assert_val)) { \
-            AMS_CALL_ASSERT_FAIL_IMPL(#expr, ## __VA_ARGS__); \
-        } \
+#define AMS_ASSERT_IMPL(expr, ...) \
+    ({ \
+        if (const bool __tmp_ams_assert_val = static_cast<bool>(expr); AMS_UNLIKELY(!__tmp_ams_assert_val)) { \
+            AMS_CALL_ASSERT_FAIL_IMPL(#expr, ## __VA_ARGS__); \
+        } \
     })
 #else
 #define AMS_ASSERT_IMPL(expr, ...) AMS_UNUSED(expr, ## __VA_ARGS__)
@@ -68,9 +68,9 @@

 #define AMS_ABORT(...) AMS_CALL_ABORT_IMPL("", ## __VA_ARGS__)

-#define AMS_ABORT_UNLESS(expr, ...) \
-    ({ \
-        if (const bool __tmp_ams_assert_val = (expr); AMS_UNLIKELY(!__tmp_ams_assert_val)) { \
-            AMS_CALL_ABORT_IMPL(#expr, ##__VA_ARGS__); \
-        } \
+#define AMS_ABORT_UNLESS(expr, ...) \
+    ({ \
+        if (const bool __tmp_ams_assert_val = static_cast<bool>(expr); AMS_UNLIKELY(!__tmp_ams_assert_val)) { \
+            AMS_CALL_ABORT_IMPL(#expr, ##__VA_ARGS__); \
+        } \
     })
@@ -19,6 +19,7 @@

 #include <vapours/crypto/crypto_memory_compare.hpp>
 #include <vapours/crypto/crypto_memory_clear.hpp>
+#include <vapours/crypto/crypto_sha1_generator.hpp>
 #include <vapours/crypto/crypto_sha256_generator.hpp>
 #include <vapours/crypto/crypto_rsa_pss_sha256_verifier.hpp>
 #include <vapours/crypto/crypto_rsa_oaep_sha256_decoder.hpp>
@@ -0,0 +1,63 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once
#include <vapours/common.hpp>
#include <vapours/assert.hpp>
#include <vapours/util.hpp>
#include <vapours/crypto/impl/crypto_sha1_impl.hpp>

namespace ams::crypto {

    class Sha1Generator {
        NON_COPYABLE(Sha1Generator);
        NON_MOVEABLE(Sha1Generator);
        private:
            using Impl = impl::Sha1Impl;
        public:
            static constexpr size_t HashSize = Impl::HashSize;
            static constexpr size_t BlockSize = Impl::BlockSize;

            static constexpr inline u8 Asn1Identifier[] = {
                0x30, 0x21, /* Sequence, size 0x21 */
                0x30, 0x09, /* Sequence, size 0x09 */
                0x06, 0x05, /* Object Identifier */
                0x2B, 0x0E, 0x03, 0x02, 0x1A, /* SHA-1 */
                0x05, 0x00, /* Null */
                0x04, 0x14, /* Octet string, size 0x14 */
            };
            static constexpr size_t Asn1IdentifierSize = util::size(Asn1Identifier);
        private:
            Impl impl;
        public:
            Sha1Generator() { /* ... */ }

            void Initialize() {
                this->impl.Initialize();
            }

            void Update(const void *data, size_t size) {
                this->impl.Update(data, size);
            }

            void GetHash(void *dst, size_t size) {
                this->impl.GetHash(dst, size);
            }
    };

    void GenerateSha1Hash(void *dst, size_t dst_size, const void *src, size_t src_size);

}
@@ -28,6 +28,8 @@ namespace ams::crypto
     };

     class Sha256Generator {
+        NON_COPYABLE(Sha256Generator);
+        NON_MOVEABLE(Sha256Generator);
         private:
             using Impl = impl::Sha256Impl;
         public:

@@ -39,7 +41,7 @@
             0x30, 0x0D, /* Sequence, size 0x0D */
             0x06, 0x09, /* Object Identifier */
             0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01, /* SHA-256 */
-            0x00, /* Null */
+            0x05, 0x00, /* Null */
             0x04, 0x20, /* Octet string, size 0x20 */
         };
         static constexpr size_t Asn1IdentifierSize = util::size(Asn1Identifier);
@@ -0,0 +1,55 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once
#include <vapours/common.hpp>
#include <vapours/assert.hpp>
#include <vapours/util.hpp>
#include <vapours/crypto/impl/crypto_hash_function.hpp>
#include <vapours/crypto/crypto_memory_clear.hpp>

namespace ams::crypto::impl {

    class Sha1Impl {
        public:
            static constexpr size_t HashSize = 0x14;
            static constexpr size_t BlockSize = 0x40;
        private:
            struct State {
                u32 intermediate_hash[HashSize / sizeof(u32)];
                u8 buffer[BlockSize];
                u64 bits_consumed;
                size_t num_buffered;
                bool finalized;
            };
        private:
            State state;
        public:
            Sha1Impl() { /* ... */ }
            ~Sha1Impl() {
                static_assert(std::is_trivially_destructible<State>::value);
                ClearMemory(std::addressof(this->state), sizeof(this->state));
            }

            void Initialize();
            void Update(const void *data, size_t size);
            void GetHash(void *dst, size_t size);
    };

    /* static_assert(HashFunction<Sha1Impl>); */

}
@@ -60,6 +60,8 @@ namespace ams::svc
            T pointer;
        public:
            constexpr ALWAYS_INLINE UserPointer(T p) : pointer(p) { /* ... */ }

            constexpr ALWAYS_INLINE T GetPointerUnsafe() { return this->pointer; }
    };

    template<typename T>

@@ -168,11 +170,11 @@
     InitialProcessIdRangeInfo_Maximum = 1,
 };

-enum PhysicalMemoryInfo : u64 {
-    PhysicalMemoryInfo_Application = 0,
-    PhysicalMemoryInfo_Applet = 1,
-    PhysicalMemoryInfo_System = 2,
-    PhysicalMemoryInfo_SystemUnsafe = 3,
+enum PhysicalMemorySystemInfo : u64 {
+    PhysicalMemorySystemInfo_Application = 0,
+    PhysicalMemorySystemInfo_Applet = 1,
+    PhysicalMemorySystemInfo_System = 2,
+    PhysicalMemorySystemInfo_SystemUnsafe = 3,
 };

 enum LastThreadInfoFlag : u32 {
libraries/libvapours/source/crypto/crypto_sha1_generator.cpp (new file, 28 lines)
@@ -0,0 +1,28 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <vapours.hpp>

namespace ams::crypto {

    void GenerateSha1Hash(void *dst, size_t dst_size, const void *src, size_t src_size) {
        Sha1Generator gen;

        gen.Initialize();
        gen.Update(src, src_size);
        gen.GetHash(dst, dst_size);
    }

}
@@ -0,0 +1,44 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <vapours.hpp>

namespace ams::crypto::impl {

    #ifdef ATMOSPHERE_IS_STRATOSPHERE

    void Sha1Impl::Initialize() {
        static_assert(sizeof(this->state) == sizeof(::Sha1Context));
        ::sha1ContextCreate(reinterpret_cast<::Sha1Context *>(std::addressof(this->state)));
    }

    void Sha1Impl::Update(const void *data, size_t size) {
        static_assert(sizeof(this->state) == sizeof(::Sha1Context));
        ::sha1ContextUpdate(reinterpret_cast<::Sha1Context *>(std::addressof(this->state)), data, size);
    }

    void Sha1Impl::GetHash(void *dst, size_t size) {
        static_assert(sizeof(this->state) == sizeof(::Sha1Context));
        AMS_ASSERT(size >= HashSize);
        ::sha1ContextGetHash(reinterpret_cast<::Sha1Context *>(std::addressof(this->state)), dst);
    }

    #else

    /* TODO: Non-EL0 implementation. */

    #endif

}
@@ -49,7 +49,7 @@ namespace ams::mitm::fs
     void ProcessForServerOnAllThreads() {
         /* Initialize threads. */
         if constexpr (NumExtraThreads > 0) {
-            const u32 priority = os::GetCurrentThreadPriority();
+            const s32 priority = os::GetCurrentThreadPriority();
             for (size_t i = 0; i < NumExtraThreads; i++) {
                 R_ABORT_UNLESS(g_extra_threads[i].Initialize(LoopServerThread, nullptr, g_extra_thread_stacks[i], ThreadStackSize, priority));
             }

@@ -139,7 +139,7 @@ int main(int argc, char **argv)

     /* Initialize threads. */
     if constexpr (NumExtraThreads > 0) {
-        const u32 priority = os::GetCurrentThreadPriority();
+        const s32 priority = os::GetCurrentThreadPriority();
         for (size_t i = 0; i < NumExtraThreads; i++) {
             R_ABORT_UNLESS(g_extra_threads[i].Initialize(LoopServerThread, nullptr, g_extra_thread_stacks[i], ThreadStackSize, priority));
         }