/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <mesosphere/kern_common.hpp>
#include <mesosphere/kern_k_page_bitmap.hpp>

namespace ams::kern {
/* A buddy-style physical page heap: free pages are tracked per size class, */
/* each size class backed by a KPageBitmap, with coalescing into the next */
/* larger class on free and splitting (implemented elsewhere) on allocate. */
class KPageHeap {
    private:
        /* Supported block sizes as log2(bytes): */
        /* 4 KB, 64 KB, 2 MB, 4 MB, 32 MB, 512 MB, 1 GB. Must be ascending. */
        static constexpr inline size_t MemoryBlockPageShifts[] = { 0xC, 0x10, 0x15, 0x16, 0x19, 0x1D, 0x1E };
        static constexpr size_t NumMemoryBlockPageShifts = util::size(MemoryBlockPageShifts);
    public:
        /* Returns the index of the smallest block size that can hold num_pages pages */
        /* while also covering an alignment of align_pages pages, or -1 if none fits. */
        static constexpr s32 GetAlignedBlockIndex(size_t num_pages, size_t align_pages) {
            /* A block that covers max(size, alignment) pages satisfies both constraints. */
            const size_t target_pages = std::max(num_pages, align_pages);
            for (size_t i = 0; i < NumMemoryBlockPageShifts; i++) {
                if (target_pages <= (size_t(1) << MemoryBlockPageShifts[i]) / PageSize) {
                    return static_cast<s32>(i);
                }
            }
            return -1;
        }

        /* Returns the index of the largest block size not exceeding num_pages pages, */
        /* or -1 if num_pages is smaller than even the smallest block. */
        static constexpr s32 GetBlockIndex(size_t num_pages) {
            for (s32 i = static_cast<s32>(NumMemoryBlockPageShifts) - 1; i >= 0; i--) {
                if (num_pages >= (size_t(1) << MemoryBlockPageShifts[i]) / PageSize) {
                    return i;
                }
            }
            return -1;
        }

        /* Returns the size in bytes of blocks in size class index. No bounds check. */
        static constexpr size_t GetBlockSize(size_t index) {
            return size_t(1) << MemoryBlockPageShifts[index];
        }

        /* Returns the size in pages of blocks in size class index. */
        static constexpr size_t GetBlockNumPages(size_t index) {
            return GetBlockSize(index) / PageSize;
        }
    private:
        /* Free-list for a single size class: one bitmap bit per block, */
        /* where a set bit marks a free block. */
        class Block {
            private:
                KPageBitmap bitmap;           /* One bit per block; set = free. */
                KVirtualAddress heap_address; /* Aligned base address block offsets are relative to. */
                uintptr_t end_offset;         /* Number of blocks tracked (one past the last offset). */
                size_t block_shift;           /* log2 of this class's block size in bytes. */
                size_t next_block_shift;      /* log2 of the next larger class's block size, or 0 if this is the largest. */
            public:
                Block() : bitmap(), heap_address(), end_offset(), block_shift(), next_block_shift() { /* ... */ }

                constexpr size_t GetShift() const { return this->block_shift; }
                constexpr size_t GetNextShift() const { return this->next_block_shift; }
                constexpr size_t GetSize() const { return u64(1) << this->GetShift(); }
                constexpr size_t GetNumPages() const { return this->GetSize() / PageSize; }
                constexpr size_t GetNumFreeBlocks() const { return this->bitmap.GetNumBits(); }
                constexpr size_t GetNumFreePages() const { return this->GetNumFreeBlocks() * this->GetNumPages(); }

                /* Sets up this size class to cover [addr, addr + size), using bit_storage */
                /* for the bitmap's words. Returns the first unused storage word, so */
                /* callers can initialize successive Blocks from one metadata region. */
                u64 *Initialize(KVirtualAddress addr, size_t size, size_t bs, size_t nbs, u64 *bit_storage) {
                    /* Set shifts. */
                    this->block_shift = bs;
                    this->next_block_shift = nbs;

                    /* Align the range outward (addr down, end up) to the coalescing */
                    /* granularity: the next class's block size if there is one, else our own. */
                    KVirtualAddress end = addr + size;
                    const size_t align = (this->next_block_shift != 0) ? (u64(1) << this->next_block_shift) : (u64(1) << this->block_shift);
                    addr = util::AlignDown(GetInteger(addr), align);
                    end = util::AlignUp(GetInteger(end), align);

                    this->heap_address = addr;
                    /* end_offset is the total number of blocks in the aligned range. */
                    this->end_offset = (end - addr) / (u64(1) << this->block_shift);
                    return this->bitmap.Initialize(bit_storage, this->end_offset);
                }

                /* Frees the block at address into this size class. If the whole group of */
                /* blocks forming one next-class block becomes free, those bits are cleared */
                /* and the group's base address is returned for promotion into the next */
                /* class; otherwise returns Null. */
                KVirtualAddress PushBlock(KVirtualAddress address) {
                    /* Set the bit for the free block. */
                    size_t offset = (address - this->heap_address) >> this->GetShift();
                    this->bitmap.SetBit(offset);

                    /* If we have a next shift, try to clear the blocks below this one and return the new address. */
                    if (this->GetNextShift()) {
                        /* diff = number of our blocks per next-class block. */
                        const size_t diff = u64(1) << (this->GetNextShift() - this->GetShift());
                        offset = util::AlignDown(offset, diff);
                        /* NOTE: relies on ClearRange succeeding only when the entire */
                        /* aligned group is free — see KPageBitmap. */
                        if (this->bitmap.ClearRange(offset, diff)) {
                            return this->heap_address + (offset << this->GetShift());
                        }
                    }

                    /* We couldn't coalesce, or we're already as big as possible. */
                    return Null<KVirtualAddress>;
                }

                /* Takes one free block out of this size class and returns its address, */
                /* or Null if the class has no free blocks. random selects a randomized */
                /* rather than first-fit bitmap scan. */
                KVirtualAddress PopBlock(bool random) {
                    /* Find a free block. */
                    ssize_t soffset = this->bitmap.FindFreeBlock(random);
                    if (soffset < 0) {
                        return Null<KVirtualAddress>;
                    }
                    const size_t offset = static_cast<size_t>(soffset);

                    /* Update our tracking and return it. */
                    this->bitmap.ClearBit(offset);
                    return this->heap_address + (offset << this->GetShift());
                }
            public:
                /* Worst-case bitmap storage (in bytes) one Block needs to cover region_size. */
                static constexpr size_t CalculateMetadataOverheadSize(size_t region_size, size_t cur_block_shift, size_t next_block_shift) {
                    const size_t cur_block_size = (u64(1) << cur_block_shift);
                    const size_t next_block_size = (u64(1) << next_block_shift);
                    /* Initialize() aligns the range outward to this same granularity... */
                    const size_t align = (next_block_shift != 0) ? next_block_size : cur_block_size;
                    /* ...so budget up to one extra align-sized chunk at each end. */
                    return KPageBitmap::CalculateMetadataOverheadSize((align * 2 + util::AlignUp(region_size, align)) / cur_block_size);
                }
        };
    private:
        KVirtualAddress heap_address;            /* Base of the managed physical-heap region (as virtual address). */
        size_t heap_size;                        /* Size of the managed region in bytes. */
        size_t used_size;                        /* Bytes currently in use; maintained by UpdateUsedSize(). */
        size_t num_blocks;                       /* Number of active size classes in blocks[]. */
        Block blocks[NumMemoryBlockPageShifts];  /* One free-list per size class, smallest first. */
    private:
        /* Full initializer taking an explicit size-class table; defined out of line. */
        void Initialize(KVirtualAddress heap_address, size_t heap_size, KVirtualAddress metadata_address, size_t metadata_size, const size_t *block_shifts, size_t num_block_shifts);
        size_t GetNumFreePages() const;

        void FreeBlock(KVirtualAddress block, s32 index);
    public:
        KPageHeap() : heap_address(), heap_size(), used_size(), num_blocks(), blocks() { /* ... */ }

        constexpr KVirtualAddress GetAddress() const { return this->heap_address; }
        constexpr size_t GetSize() const { return this->heap_size; }
        constexpr KVirtualAddress GetEndAddress() const { return this->GetAddress() + this->GetSize(); }
        constexpr size_t GetPageOffset(KVirtualAddress block) const { return (block - this->GetAddress()) / PageSize; }
        constexpr size_t GetPageOffsetToEnd(KVirtualAddress block) const { return (this->GetEndAddress() - block) / PageSize; }

        /* Initializes the heap over [heap_address, heap_address + heap_size) using the */
        /* default size-class table; bitmap storage is carved from the metadata region. */
        void Initialize(KVirtualAddress heap_address, size_t heap_size, KVirtualAddress metadata_address, size_t metadata_size) {
            return Initialize(heap_address, heap_size, metadata_address, metadata_size, MemoryBlockPageShifts, NumMemoryBlockPageShifts);
        }

        size_t GetFreeSize() const { return this->GetNumFreePages() * PageSize; }

        /* Recomputes used_size from the current free-page count. */
        void UpdateUsedSize() {
            this->used_size = this->heap_size - (this->GetNumFreePages() * PageSize);
        }

        /* Allocates one block of size class index (randomized scan if random); */
        /* defined out of line. */
        KVirtualAddress AllocateBlock(s32 index, bool random);
        /* Returns num_pages pages starting at addr to the heap; defined out of line. */
        void Free(KVirtualAddress addr, size_t num_pages);
    private:
        static size_t CalculateMetadataOverheadSize(size_t region_size, const size_t *block_shifts, size_t num_block_shifts);
    public:
        /* Total bitmap-metadata bytes needed to manage a region of region_size bytes */
        /* with the default size-class table. */
        static size_t CalculateMetadataOverheadSize(size_t region_size) {
            return CalculateMetadataOverheadSize(region_size, MemoryBlockPageShifts, NumMemoryBlockPageShifts);
        }
};

}