/*
 * Copyright (c) Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <mesosphere/kern_common.hpp>
#include <mesosphere/kern_k_typed_address.hpp>
#include <mesosphere/kern_k_memory_layout.hpp>

#if defined(ATMOSPHERE_ARCH_ARM64)

    #include <mesosphere/arch/arm64/kern_k_slab_heap_impl.hpp>

    namespace ams::kern {
        using ams::kern::arch::arm64::IsSlabAtomicValid;
        using ams::kern::arch::arm64::AllocateFromSlabAtomic;
        using ams::kern::arch::arm64::FreeToSlabAtomic;
    }

#else
    #error "Unknown architecture for KSlabHeapImpl"
#endif

namespace ams::kern {

    namespace impl {

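        /* KSlabHeapImpl maintains an intrusive singly-linked free list of fixed-size objects. */
        /* Allocation and free are delegated to the architecture-specific lock-free helpers    */
        /* (AllocateFromSlabAtomic / FreeToSlabAtomic) imported above.                          */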
        class KSlabHeapImpl {
            NON_COPYABLE(KSlabHeapImpl);
            NON_MOVEABLE(KSlabHeapImpl);
            public:
                struct Node {
                    Node *next;
                };
            private:
                Node *m_head{nullptr};
            public:
                constexpr KSlabHeapImpl() = default;

                void Initialize() {
                    MESOSPHERE_ABORT_UNLESS(m_head == nullptr);
                    MESOSPHERE_ABORT_UNLESS(IsSlabAtomicValid());
                }

                ALWAYS_INLINE Node *GetHead() const {
                    return m_head;
                }

                ALWAYS_INLINE void *Allocate() {
                    return AllocateFromSlabAtomic(std::addressof(m_head));
                }

                ALWAYS_INLINE void Free(void *obj) {
                    return FreeToSlabAtomic(std::addressof(m_head), static_cast<Node *>(obj));
                }
        };

    }

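    /* KSlabHeapBase manages a contiguous memory region carved into fixed-size objects, on top */
    /* of the lock-free free list. When SupportDynamicExpansion is true, objects that lie in   */
    /* the global slab region (but outside this heap's range) may also be freed into it.       */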
    template<bool SupportDynamicExpansion>
    class KSlabHeapBase : protected impl::KSlabHeapImpl {
        NON_COPYABLE(KSlabHeapBase);
        NON_MOVEABLE(KSlabHeapBase);
        private:
            size_t m_obj_size{};
            uintptr_t m_peak{};
            uintptr_t m_start{};
            uintptr_t m_end{};
        private:
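            /* Raise the peak watermark to cover the given object, using a compare-exchange */
            /* loop so that concurrent allocators never move the peak backwards.            */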
            ALWAYS_INLINE void UpdatePeakImpl(uintptr_t obj) {
                static_assert(std::atomic_ref<uintptr_t>::is_always_lock_free);
                std::atomic_ref<uintptr_t> peak_ref(m_peak);

                const uintptr_t alloc_peak = obj + this->GetObjectSize();
                uintptr_t cur_peak = m_peak;
                do {
                    if (alloc_peak <= cur_peak) {
                        break;
                    }
                } while (!peak_ref.compare_exchange_strong(cur_peak, alloc_peak));
            }
        public:
            constexpr KSlabHeapBase() = default;

            ALWAYS_INLINE bool Contains(uintptr_t address) const {
                return m_start <= address && address < m_end;
            }

            void Initialize(size_t obj_size, void *memory, size_t memory_size) {
                /* Ensure we don't initialize a slab using null memory. */
                MESOSPHERE_ABORT_UNLESS(memory != nullptr);

                /* Set our object size. */
                m_obj_size = obj_size;

                /* Initialize the base allocator. */
                KSlabHeapImpl::Initialize();

                /* Set our tracking variables. */
                const size_t num_obj = (memory_size / obj_size);
                m_start = reinterpret_cast<uintptr_t>(memory);
                m_end   = m_start + num_obj * obj_size;
                m_peak  = m_start;

                /* Free the objects. */
                u8 *cur = reinterpret_cast<u8 *>(m_end);

                for (size_t i = 0; i < num_obj; i++) {
                    cur -= obj_size;
                    KSlabHeapImpl::Free(cur);
                }
            }

            ALWAYS_INLINE size_t GetSlabHeapSize() const {
                return (m_end - m_start) / this->GetObjectSize();
            }

            ALWAYS_INLINE size_t GetObjectSize() const {
                return m_obj_size;
            }

            ALWAYS_INLINE void *Allocate() {
                void *obj = KSlabHeapImpl::Allocate();

                /* Track the allocated peak. */
                #if defined(MESOSPHERE_BUILD_FOR_DEBUGGING)
                if (AMS_LIKELY(obj != nullptr)) {
                    if constexpr (SupportDynamicExpansion) {
                        if (this->Contains(reinterpret_cast<uintptr_t>(obj))) {
                            this->UpdatePeakImpl(reinterpret_cast<uintptr_t>(obj));
                        } else {
                            this->UpdatePeakImpl(reinterpret_cast<uintptr_t>(m_end) - this->GetObjectSize());
                        }
                    } else {
                        this->UpdatePeakImpl(reinterpret_cast<uintptr_t>(obj));
                    }
                }
                #endif

                return obj;
            }

            ALWAYS_INLINE void Free(void *obj) {
                /* Don't allow freeing an object that wasn't allocated from this heap. */
                const bool contained = this->Contains(reinterpret_cast<uintptr_t>(obj));
                if constexpr (SupportDynamicExpansion) {
                    const bool is_slab = KMemoryLayout::GetSlabRegion().Contains(reinterpret_cast<uintptr_t>(obj));
                    MESOSPHERE_ABORT_UNLESS(contained || is_slab);
                } else {
                    MESOSPHERE_ABORT_UNLESS(contained);
                }

                KSlabHeapImpl::Free(obj);
            }

            ALWAYS_INLINE size_t GetObjectIndex(const void *obj) const {
                if constexpr (SupportDynamicExpansion) {
                    if (!this->Contains(reinterpret_cast<uintptr_t>(obj))) {
                        return std::numeric_limits<size_t>::max();
                    }
                }

                return (reinterpret_cast<uintptr_t>(obj) - m_start) / this->GetObjectSize();
            }

            ALWAYS_INLINE size_t GetPeakIndex() const {
                return this->GetObjectIndex(reinterpret_cast<const void *>(m_peak));
            }

            ALWAYS_INLINE uintptr_t GetSlabHeapAddress() const {
                return m_start;
            }

            ALWAYS_INLINE size_t GetNumRemaining() const {
                size_t remaining = 0;

                /* Only calculate the number of remaining objects under debug configuration. */
                #if defined(MESOSPHERE_BUILD_FOR_DEBUGGING)
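                /* Walk the free list and count its nodes. If the walk terminates on a node that */
                /* is neither contained nor null, the list changed underneath us, so start over. */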
                while (true) {
                    auto *cur = this->GetHead();
                    remaining = 0;

                    if constexpr (SupportDynamicExpansion) {
                        const auto &slab_region = KMemoryLayout::GetSlabRegion();

                        while (this->Contains(reinterpret_cast<uintptr_t>(cur)) || slab_region.Contains(reinterpret_cast<uintptr_t>(cur))) {
                            ++remaining;
                            cur = cur->next;
                        }
                    } else {
                        while (this->Contains(reinterpret_cast<uintptr_t>(cur))) {
                            ++remaining;
                            cur = cur->next;
                        }
                    }

                    if (cur == nullptr) {
                        break;
                    }
                }
                #endif

                return remaining;
            }
    };

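    /* KSlabHeap is the typed front-end: Allocate() default-constructs a T in the returned */
    /* storage via std::construct_at, and Free() returns the storage to the free list.     */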
    template<typename T, bool SupportDynamicExpansion>
    class KSlabHeap : public KSlabHeapBase<SupportDynamicExpansion> {
        private:
            using BaseHeap = KSlabHeapBase<SupportDynamicExpansion>;
        public:
            constexpr KSlabHeap() = default;

            void Initialize(void *memory, size_t memory_size) {
                BaseHeap::Initialize(sizeof(T), memory, memory_size);
            }

            ALWAYS_INLINE T *Allocate() {
                T *obj = static_cast<T *>(BaseHeap::Allocate());

                if (AMS_LIKELY(obj != nullptr)) {
                    std::construct_at(obj);
                }

                return obj;
            }

            ALWAYS_INLINE void Free(T *obj) {
                BaseHeap::Free(obj);
            }

            ALWAYS_INLINE size_t GetObjectIndex(const T *obj) const {
                return BaseHeap::GetObjectIndex(obj);
            }
    };

}