Atmosphere/libraries/libstratosphere/source/lmem/impl/lmem_impl_common_heap.hpp

/*
* Copyright (c) Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <stratosphere.hpp>

namespace ams::lmem::impl {
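
    /* Magic values used to identify each heap type in the common heap head. */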
    constexpr inline u32 ExpHeapMagic   = util::ReverseFourCC<'E','X','P','H'>::Code;
    constexpr inline u32 FrameHeapMagic = util::ReverseFourCC<'F','R','M','H'>::Code;
    constexpr inline u32 UnitHeapMagic  = util::ReverseFourCC<'U','N','T','H'>::Code;
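
    /* Scoped lock which acquires the heap's mutex only when the heap was created with CreateOption_ThreadSafe. */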
    class ScopedHeapLock {
        NON_COPYABLE(ScopedHeapLock);
        NON_MOVEABLE(ScopedHeapLock);
        private:
            HeapHandle m_handle;
        public:
            explicit ScopedHeapLock(HeapHandle h) : m_handle(h) {
                if (m_handle->option & CreateOption_ThreadSafe) {
                    os::LockSdkMutex(std::addressof(m_handle->mutex));
                }
            }

            ~ScopedHeapLock() {
                if (m_handle->option & CreateOption_ThreadSafe) {
                    os::UnlockSdkMutex(std::addressof(m_handle->mutex));
                }
            }
    };
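
    /* Helpers for describing memory ranges and measuring pointer distances. */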
    ALWAYS_INLINE MemoryRange MakeMemoryRange(void *address, size_t size) {
        return MemoryRange{ .address = reinterpret_cast<uintptr_t>(address), .size = size };
    }

    ALWAYS_INLINE void *GetHeapStartAddress(HeapHandle handle) {
        return handle->heap_start;
    }

    ALWAYS_INLINE size_t GetPointerDifference(const void *start, const void *end) {
        return reinterpret_cast<uintptr_t>(end) - reinterpret_cast<uintptr_t>(start);
    }

    constexpr ALWAYS_INLINE size_t GetPointerDifference(uintptr_t start, uintptr_t end) {
        return end - start;
    }
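
    /* Common heap head initialization, finalization, and queries. */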
    void InitializeHeapHead(HeapHead *out, u32 magic, void *start, void *end, u32 option);
    void FinalizeHeap(HeapHead *heap);

    bool ContainsAddress(HeapHandle handle, const void *address);
    size_t GetHeapTotalSize(HeapHandle handle);
    /* Debug Fill */
    u32 GetDebugFillValue(FillType type);
    u32 SetDebugFillValue(FillType type, u32 value);

    inline void FillMemory(void *dst, u32 fill_value, size_t size) {
        /* All heap blocks must be at least 32-bit aligned. */
        AMS_ASSERT(util::IsAligned(reinterpret_cast<uintptr_t>(dst), alignof(u32)));
        AMS_ASSERT(util::IsAligned(size, sizeof(u32)));

        for (size_t i = 0; i < size / sizeof(fill_value); i++) {
            reinterpret_cast<u32 *>(dst)[i] = fill_value;
        }
    }
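
    /* Conditionally fill unallocated, allocated, and freed memory, according to the heap's creation options. */
    /* For allocated memory, zero-clearing takes precedence over the debug fill pattern. */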
    inline void FillUnallocatedMemory(HeapHead *heap, void *address, size_t size) {
        if (heap->option & CreateOption_DebugFill) {
            FillMemory(address, impl::GetDebugFillValue(FillType_Unallocated), size);
        }
    }

    inline void FillAllocatedMemory(HeapHead *heap, void *address, size_t size) {
        if (heap->option & CreateOption_ZeroClear) {
            FillMemory(address, 0, size);
        } else if (heap->option & CreateOption_DebugFill) {
            FillMemory(address, impl::GetDebugFillValue(FillType_Allocated), size);
        }
    }

    inline void FillFreedMemory(HeapHead *heap, void *address, size_t size) {
        if (heap->option & CreateOption_DebugFill) {
            FillMemory(address, impl::GetDebugFillValue(FillType_Freed), size);
        }
    }

}