/*
 * Copyright (c) Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <mesosphere/kern_common.hpp>
#include <mesosphere/kern_k_slab_heap.hpp>
#include <mesosphere/kern_k_page_group.hpp>
#include <mesosphere/kern_k_memory_block.hpp>
#include <mesosphere/kern_k_dynamic_page_manager.hpp>

namespace ams::kern {

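    /* A slab heap that can grow at runtime: when its free list is exhausted,
       additional objects are carved out of whole pages taken from a
       KDynamicPageManager. If ClearNode is set, freshly allocated storage has
       its intrusive free-list link cleared before use. */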
    template<typename T, bool ClearNode = false>
    class KDynamicSlabHeap : protected impl::KSlabHeapImpl {
        NON_COPYABLE(KDynamicSlabHeap);
        NON_MOVEABLE(KDynamicSlabHeap);
        private:
            using PageBuffer = KDynamicPageManager::PageBuffer;
        private:
            std::atomic<size_t> m_used{};
            std::atomic<size_t> m_peak{};
            std::atomic<size_t> m_count{};
            KVirtualAddress m_address{};
            size_t m_size{};
        public:
            constexpr KDynamicSlabHeap() = default;

            constexpr ALWAYS_INLINE KVirtualAddress GetAddress() const { return m_address; }
            constexpr ALWAYS_INLINE size_t GetSize() const { return m_size; }
            constexpr ALWAYS_INLINE size_t GetUsed() const { return m_used.load(); }
            constexpr ALWAYS_INLINE size_t GetPeak() const { return m_peak.load(); }
            constexpr ALWAYS_INLINE size_t GetCount() const { return m_count.load(); }

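            /* Check whether addr lies within the virtual region backing this heap. */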
            constexpr ALWAYS_INLINE bool IsInRange(KVirtualAddress addr) const {
                return this->GetAddress() <= addr && addr <= this->GetAddress() + this->GetSize() - 1;
            }

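            /* Bind this heap to the page allocator's region, then fill the free
               list until at least num_objects objects are available. */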
            ALWAYS_INLINE void Initialize(KDynamicPageManager *page_allocator, size_t num_objects) {
                MESOSPHERE_ASSERT(page_allocator != nullptr);

                /* Initialize members. */
                m_address = page_allocator->GetAddress();
                m_size    = page_allocator->GetSize();

                /* Initialize the base allocator. */
                KSlabHeapImpl::Initialize();

                /* Allocate until we have the correct number of objects. */
                while (m_count.load() < num_objects) {
                    auto *allocated = reinterpret_cast<T *>(page_allocator->Allocate());
                    MESOSPHERE_ABORT_UNLESS(allocated != nullptr);

                    for (size_t i = 0; i < sizeof(PageBuffer) / sizeof(T); i++) {
                        KSlabHeapImpl::Free(allocated + i);
                    }

                    m_count.fetch_add(sizeof(PageBuffer) / sizeof(T));
                }
            }

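            /* Allocate an object from the free list; if the list is empty and a
               page allocator is provided, grow the heap by one page first. */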
            ALWAYS_INLINE T *Allocate(KDynamicPageManager *page_allocator) {
                T *allocated = static_cast<T *>(KSlabHeapImpl::Allocate());

                /* If we successfully allocated and we should clear the node, do so. */
                if constexpr (ClearNode) {
                    if (AMS_LIKELY(allocated != nullptr)) {
                        reinterpret_cast<KSlabHeapImpl::Node *>(allocated)->next = nullptr;
                    }
                }

                /* If we fail to allocate, try to get a new page from our next allocator. */
                if (AMS_UNLIKELY(allocated == nullptr)) {
                    if (page_allocator != nullptr) {
                        allocated = reinterpret_cast<T *>(page_allocator->Allocate());
                        if (allocated != nullptr) {
                            /* If we succeeded in getting a page, free the rest to our slab. */
                            for (size_t i = 1; i < sizeof(PageBuffer) / sizeof(T); i++) {
                                KSlabHeapImpl::Free(allocated + i);
                            }
                            m_count.fetch_add(sizeof(PageBuffer) / sizeof(T));
                        }
                    }
                }

                if (AMS_LIKELY(allocated != nullptr)) {
                    /* Construct the object. */
                    std::construct_at(allocated);

                    /* Update our tracking. */
                    size_t used = m_used.fetch_add(1) + 1;
                    size_t peak = m_peak.load();
                    while (peak < used) {
                        if (m_peak.compare_exchange_weak(peak, used, std::memory_order_relaxed)) {
                            break;
                        }
                    }
                }

                return allocated;
            }

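            /* Return t's storage to the free list; t's destructor is not invoked here. */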
            ALWAYS_INLINE void Free(T *t) {
                KSlabHeapImpl::Free(t);
                m_used.fetch_sub(1);
            }
    };

}
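
#if 0
/* Usage sketch (illustrative only, compiled out): how a KDynamicSlabHeap is
   wired to the KDynamicPageManager that backs it. The names g_page_manager and
   g_heap, and the KDynamicPageManager::Initialize signature, are assumptions
   made for this example; they are not declared by this header. */
namespace ams::kern::example {

    KDynamicPageManager g_page_manager;
    KDynamicSlabHeap<KMemoryBlock> g_heap;

    void InitializeExample(KVirtualAddress region, size_t region_size) {
        /* Give the page manager a virtual region to carve PageBuffer pages from. */
        g_page_manager.Initialize(region, region_size);

        /* Bind the heap to that region and pre-fill it with at least 64 objects. */
        g_heap.Initialize(std::addressof(g_page_manager), 64);

        /* Allocate; on free-list exhaustion a fresh page is pulled from
           g_page_manager and carved into objects. */
        KMemoryBlock *block = g_heap.Allocate(std::addressof(g_page_manager));
        MESOSPHERE_ABORT_UNLESS(block != nullptr);

        /* Return the object's storage to the free list. */
        g_heap.Free(block);
    }

}
#endif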