/*
 * Copyright (c) Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
|
|
|
|
#include <mesosphere.hpp>
|
|
|
|
|
|
|
|
namespace ams::kern {
|
|
|
|
|
2020-08-03 21:06:24 +02:00
|
|
|
namespace {
|
|
|
|
|
|
|
|
class KMemoryRegionAllocator {
|
|
|
|
NON_COPYABLE(KMemoryRegionAllocator);
|
|
|
|
NON_MOVEABLE(KMemoryRegionAllocator);
|
|
|
|
public:
|
2020-12-02 02:31:21 +01:00
|
|
|
static constexpr size_t MaxMemoryRegions = 200;
|
2020-08-03 21:06:24 +02:00
|
|
|
private:
|
2021-10-10 00:46:04 +02:00
|
|
|
KMemoryRegion m_region_heap[MaxMemoryRegions];
|
|
|
|
size_t m_num_regions;
|
2020-08-03 21:06:24 +02:00
|
|
|
public:
|
2021-10-10 00:46:04 +02:00
|
|
|
constexpr ALWAYS_INLINE KMemoryRegionAllocator() : m_region_heap(), m_num_regions() { /* ... */ }
|
2020-08-03 21:06:24 +02:00
|
|
|
public:
|
|
|
|
template<typename... Args>
|
|
|
|
ALWAYS_INLINE KMemoryRegion *Allocate(Args&&... args) {
|
|
|
|
/* Ensure we stay within the bounds of our heap. */
|
2021-10-10 00:46:04 +02:00
|
|
|
MESOSPHERE_INIT_ABORT_UNLESS(m_num_regions < MaxMemoryRegions);
|
2020-08-03 21:06:24 +02:00
|
|
|
|
|
|
|
/* Create the new region. */
|
2021-10-10 00:46:04 +02:00
|
|
|
KMemoryRegion *region = std::addressof(m_region_heap[m_num_regions++]);
|
2021-03-22 04:30:40 +01:00
|
|
|
std::construct_at(region, std::forward<Args>(args)...);
|
2020-08-03 21:06:24 +02:00
|
|
|
|
|
|
|
return region;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
/* Single file-local allocator instance backing all region allocations during init. */
constinit KMemoryRegionAllocator g_memory_region_allocator;

/* Convenience wrapper: allocate and construct a KMemoryRegion from the global allocator. */
template<typename... Args>
ALWAYS_INLINE KMemoryRegion *AllocateRegion(Args&&... args) {
    return g_memory_region_allocator.Allocate(std::forward<Args>(args)...);
}
|
|
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2020-12-02 01:42:25 +01:00
|
|
|
void KMemoryRegionTree::InsertDirectly(uintptr_t address, uintptr_t last_address, u32 attr, u32 type_id) {
|
|
|
|
this->insert(*AllocateRegion(address, last_address, attr, type_id));
|
2020-08-03 21:06:24 +02:00
|
|
|
}
|
|
|
|
|
2020-02-05 23:16:56 +01:00
|
|
|
/* Carves the range [address, address + size) out of the existing region that contains it, */
/* re-typing the carved portion to type_id with attributes new_attr. Returns false without */
/* modifying the tree when the containing region's attributes differ from old_attr, when the */
/* range does not fit inside it, or when type_id is not a valid derivation of its type. */
bool KMemoryRegionTree::Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) {
    /* Locate the memory region that contains the address. */
    /* NOTE(review): result is not null-checked — assumes the tree always covers address; confirm invariant. */
    KMemoryRegion *found = this->FindModifiable(address);

    /* We require that the old attr is correct. */
    if (found->GetAttributes() != old_attr) {
        return false;
    }

    /* We further require that the region can be split from the old region. */
    const uintptr_t inserted_region_end = address + size;
    const uintptr_t inserted_region_last = inserted_region_end - 1;
    if (found->GetLastAddress() < inserted_region_last) {
        return false;
    }

    /* Further, we require that the type id is a valid transformation. */
    if (!found->CanDerive(type_id)) {
        return false;
    }

    /* Cache information from the region before we remove it. */
    /* (Reset() below overwrites these fields, so they must be read first.) */
    const uintptr_t old_address = found->GetAddress();
    const uintptr_t old_last = found->GetLastAddress();
    const uintptr_t old_pair = found->GetPairAddress();
    const u32 old_type = found->GetType();

    /* Erase the existing region from the tree. */
    this->erase(this->iterator_to(*found));

    /* Insert the new region into the tree. */
    if (old_address == address) {
        /* Reuse the old object for the new region, if we can. */
        found->Reset(address, inserted_region_last, old_pair, new_attr, type_id);
        this->insert(*found);
    } else {
        /* If we can't re-use, adjust the old region. */
        /* The reused node now covers only the prefix [old_address, address - 1]. */
        found->Reset(old_address, address - 1, old_pair, old_attr, old_type);
        this->insert(*found);

        /* Insert a new region for the split. */
        /* Shift the pair address by the same offset, unless the region is unpaired */
        /* (uintptr_t max is the "no pair" sentinel). */
        const uintptr_t new_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (address - old_address) : old_pair;
        this->insert(*AllocateRegion(address, inserted_region_last, new_pair, new_attr, type_id));
    }

    /* If we need to insert a region after the region, do so. */
    /* This is the suffix (inserted_region_end, old_last] left over from the original region. */
    if (old_last != inserted_region_last) {
        const uintptr_t after_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (inserted_region_end - old_address) : old_pair;
        this->insert(*AllocateRegion(inserted_region_end, old_last, after_pair, old_attr, old_type));
    }

    return true;
}
|
|
|
|
|
2021-04-22 04:24:41 +02:00
|
|
|
void KMemoryLayout::InitializeLinearMemoryRegionTrees() {
|
2020-01-29 07:09:47 +01:00
|
|
|
/* Initialize linear trees. */
|
2020-02-05 23:16:56 +01:00
|
|
|
for (auto ®ion : GetPhysicalMemoryRegionTree()) {
|
2020-08-03 21:06:24 +02:00
|
|
|
if (region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
|
2020-12-02 01:42:25 +01:00
|
|
|
GetPhysicalLinearMemoryRegionTree().InsertDirectly(region.GetAddress(), region.GetLastAddress(), region.GetAttributes(), region.GetType());
|
2020-01-29 07:09:47 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-05 23:16:56 +01:00
|
|
|
for (auto ®ion : GetVirtualMemoryRegionTree()) {
|
2020-08-03 21:06:24 +02:00
|
|
|
if (region.IsDerivedFrom(KMemoryRegionType_Dram)) {
|
2020-12-02 01:42:25 +01:00
|
|
|
GetVirtualLinearMemoryRegionTree().InsertDirectly(region.GetAddress(), region.GetLastAddress(), region.GetAttributes(), region.GetType());
|
2020-01-29 07:09:47 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-08-18 01:45:41 +02:00
|
|
|
size_t KMemoryLayout::GetResourceRegionSizeForInit() {
|
|
|
|
/* Calculate resource region size based on whether we allow extra threads. */
|
|
|
|
const bool use_extra_resources = KSystemControl::Init::ShouldIncreaseThreadResourceLimit();
|
|
|
|
|
2021-04-07 21:48:20 +02:00
|
|
|
return KernelResourceSize + (use_extra_resources ? KernelSlabHeapAdditionalSize : 0);
|
2020-08-18 01:45:41 +02:00
|
|
|
}
|
|
|
|
|
2020-01-29 07:09:47 +01:00
|
|
|
}
|