/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <mesosphere/kern_slab_helpers.hpp>
#include <mesosphere/kern_k_synchronization_object.hpp>
#include <mesosphere/kern_k_affinity_mask.hpp>
#include <mesosphere/kern_k_thread_context.hpp>
#include <mesosphere/kern_k_current_context.hpp>
#include <mesosphere/kern_k_timer_task.hpp>
#include <mesosphere/kern_k_worker_task.hpp>

namespace ams::kern {

    class KThreadQueue;

    using KThreadFunction = void (*)(uintptr_t);

    class KThread final : public KAutoObjectWithSlabHeapAndContainer<KThread, KSynchronizationObject>, public KTimerTask, public KWorkerTask {
        MESOSPHERE_AUTOOBJECT_TRAITS(KThread, KSynchronizationObject);
        public:
            static constexpr s32 MainThreadPriority = 1;
            static constexpr s32 IdleThreadPriority = 64;

            enum ThreadType : u32 {
                ThreadType_Main         = 0,
                ThreadType_Kernel       = 1,
                ThreadType_HighPriority = 2,
                ThreadType_User         = 3,
            };

            enum SuspendType : u32 {
                SuspendType_Process = 0,
                SuspendType_Thread  = 1,
                SuspendType_Debug   = 2,
                SuspendType_Unk3    = 3,
                SuspendType_Init    = 4,

                SuspendType_Count,
            };
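
            /* ThreadState packs the scheduling state into the low ThreadState_SuspendShift bits and one suspend flag per SuspendType into the bits above. */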
            enum ThreadState : u16 {
                ThreadState_Initialized = 0,
                ThreadState_Waiting     = 1,
                ThreadState_Runnable    = 2,
                ThreadState_Terminated  = 3,

                ThreadState_SuspendShift = 4,
                ThreadState_Mask         = (1 << ThreadState_SuspendShift) - 1,

                ThreadState_ProcessSuspended = (1 << (SuspendType_Process + ThreadState_SuspendShift)),
                ThreadState_ThreadSuspended  = (1 << (SuspendType_Thread  + ThreadState_SuspendShift)),
                ThreadState_DebugSuspended   = (1 << (SuspendType_Debug   + ThreadState_SuspendShift)),
                ThreadState_Unk3Suspended    = (1 << (SuspendType_Unk3    + ThreadState_SuspendShift)),
                ThreadState_InitSuspended    = (1 << (SuspendType_Init    + ThreadState_SuspendShift)),

                ThreadState_SuspendFlagMask = ((1 << SuspendType_Count) - 1) << ThreadState_SuspendShift,
            };

            enum DpcFlag : u32 {
                DpcFlag_Terminating = (1 << 0),
                DpcFlag_Terminated  = (1 << 1),
            };
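
            /* StackParameters are placed immediately below the top of each thread's kernel stack; see GetStackParameters(). */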
            struct StackParameters {
                alignas(0x10) u8 svc_permission[0x10];
                std::atomic<u8> dpc_flags;
                u8 current_svc_id;
                bool is_calling_svc;
                bool is_in_exception_handler;
                bool has_exception_svc_perms;
                s32 disable_count;
                KThreadContext *context;
            };
            static_assert(alignof(StackParameters) == 0x10);
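
            /* QueueEntry is an intrusive doubly-linked list node; each thread holds one per core for the priority queues and one for the sleeping queue. */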
            struct QueueEntry {
                private:
                    KThread *prev;
                    KThread *next;
                public:
                    constexpr QueueEntry() : prev(nullptr), next(nullptr) { /* ... */ }

                    constexpr void Initialize() {
                        this->prev = nullptr;
                        this->next = nullptr;
                    }

                    constexpr KThread *GetPrev() const { return this->prev; }
                    constexpr KThread *GetNext() const { return this->next; }
                    constexpr void SetPrev(KThread *t) { this->prev = t; }
                    constexpr void SetNext(KThread *t) { this->next = t; }
            };
        private:
            static constexpr size_t PriorityInheritanceCountMax = 10;
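
            /* SyncObjectBuffer overlays the array of synchronization object pointers with an equally sized array of handles. */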
            union SyncObjectBuffer {
                KSynchronizationObject *sync_objects[ams::svc::MaxWaitSynchronizationHandleCount];
                ams::svc::Handle handles[ams::svc::MaxWaitSynchronizationHandleCount * (sizeof(KSynchronizationObject *) / sizeof(ams::svc::Handle))];

                constexpr SyncObjectBuffer() : sync_objects() { /* ... */ }
            };
            static_assert(sizeof(SyncObjectBuffer::sync_objects) == sizeof(SyncObjectBuffer::handles));
        private:
            static inline std::atomic<u64> s_next_thread_id = 0;
        private:
            alignas(16) KThreadContext thread_context;
            KAffinityMask affinity_mask;
            u64 thread_id;
            std::atomic<s64> cpu_time;
            KSynchronizationObject *synced_object;
            KLightLock *waiting_lock;
            uintptr_t condvar_key;
            uintptr_t entrypoint;
            KProcessAddress arbiter_key;
            KProcess *parent;
            void *kernel_stack_top;
            u32 *light_ipc_data;
            KProcessAddress tls_address;
            void *tls_heap_address;
            KLightLock activity_pause_lock;
            SyncObjectBuffer sync_object_buffer;
            s64 schedule_count;
            s64 last_scheduled_tick;
            QueueEntry per_core_priority_queue_entry[cpu::NumCores];
            QueueEntry sleeping_queue_entry;
            KThreadQueue *sleeping_queue;
            util::IntrusiveListNode waiter_list_node;
            util::IntrusiveRedBlackTreeNode condvar_arbiter_tree_node;
            util::IntrusiveListNode process_list_node;

            using WaiterListTraits = util::IntrusiveListMemberTraitsDeferredAssert<&KThread::waiter_list_node>;
            using WaiterList = WaiterListTraits::ListType;

            WaiterList waiter_list;
            WaiterList paused_waiter_list;
            KThread *lock_owner;
            void /* TODO KCondVar*/ *cond_var_tree;
            uintptr_t debug_params[3];
            u32 arbiter_value;
            u32 suspend_request_flags;
            u32 suspend_allowed_flags;
            Result wait_result;
            Result debug_exception_result;
            s32 priority;
            s32 core_id;
            s32 base_priority;
            s32 ideal_core_id;
            s32 num_kernel_waiters;
            KAffinityMask original_affinity_mask;
            s32 original_ideal_core_id;
            s32 num_core_migration_disables;
            ThreadState thread_state;
            std::atomic<bool> termination_requested;
            bool ipc_cancelled;
            bool wait_cancelled;
            bool cancellable;
            bool registered;
            bool signaled;
            bool initialized;
            bool debug_attached;
            s8 priority_inheritance_count;
            bool resource_limit_release_hint;
        public:
            constexpr KThread() :
                thread_context(), affinity_mask(), thread_id(), cpu_time(), synced_object(), waiting_lock(),
                condvar_key(), entrypoint(), arbiter_key(), parent(), kernel_stack_top(), light_ipc_data(),
                tls_address(), tls_heap_address(), activity_pause_lock(), sync_object_buffer(), schedule_count(),
                last_scheduled_tick(), per_core_priority_queue_entry(), sleeping_queue_entry(), sleeping_queue(), waiter_list_node(),
                condvar_arbiter_tree_node(), process_list_node(), waiter_list(), paused_waiter_list(), lock_owner(),
                cond_var_tree(), debug_params(), arbiter_value(), suspend_request_flags(), suspend_allowed_flags(),
                wait_result(ResultSuccess()), debug_exception_result(ResultSuccess()), priority(), core_id(), base_priority(),
                ideal_core_id(), num_kernel_waiters(), original_affinity_mask(), original_ideal_core_id(), num_core_migration_disables(),
                thread_state(), termination_requested(), ipc_cancelled(), wait_cancelled(), cancellable(),
                registered(), signaled(), initialized(), debug_attached(), priority_inheritance_count(),
                resource_limit_release_hint()
            {
                /* ... */
            }
            virtual ~KThread() { /* ... */ }
            /* TODO: Is a constexpr KThread() possible? */

            Result Initialize(KThreadFunction func, uintptr_t arg, void *kern_stack_top, KProcessAddress user_stack_top, s32 prio, s32 core, KProcess *owner, ThreadType type);

        private:
            static Result InitializeThread(KThread *thread, KThreadFunction func, uintptr_t arg, KProcessAddress user_stack_top, s32 prio, s32 core, KProcess *owner, ThreadType type);
        public:
            static Result InitializeKernelThread(KThread *thread, KThreadFunction func, uintptr_t arg, s32 prio, s32 core) {
                return InitializeThread(thread, func, arg, Null<KProcessAddress>, prio, core, nullptr, ThreadType_Kernel);
            }

            static Result InitializeHighPriorityThread(KThread *thread, KThreadFunction func, uintptr_t arg) {
                return InitializeThread(thread, func, arg, Null<KProcessAddress>, 0, GetCurrentCoreId(), nullptr, ThreadType_HighPriority);
            }

            /* TODO: static Result InitializeUserThread */
        private:
            StackParameters &GetStackParameters() {
                return *(reinterpret_cast<StackParameters *>(this->kernel_stack_top) - 1);
            }

            const StackParameters &GetStackParameters() const {
                return *(reinterpret_cast<const StackParameters *>(this->kernel_stack_top) - 1);
            }
        public:
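            /* disable_count counts nested DisableDispatch() calls; EnableDispatch() asserts that it is positive before decrementing. */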
            ALWAYS_INLINE s32 GetDisableDispatchCount() const {
                MESOSPHERE_ASSERT_THIS();
                return this->GetStackParameters().disable_count;
            }

            ALWAYS_INLINE void DisableDispatch() {
                MESOSPHERE_ASSERT_THIS();
                MESOSPHERE_ASSERT(GetCurrentThread().GetDisableDispatchCount() >= 0);
                this->GetStackParameters().disable_count++;
            }

            ALWAYS_INLINE void EnableDispatch() {
                MESOSPHERE_ASSERT_THIS();
                MESOSPHERE_ASSERT(GetCurrentThread().GetDisableDispatchCount() > 0);
                this->GetStackParameters().disable_count--;
            }

            ALWAYS_INLINE void SetInExceptionHandler() {
                MESOSPHERE_ASSERT_THIS();
                this->GetStackParameters().is_in_exception_handler = true;
            }

            ALWAYS_INLINE void ClearInExceptionHandler() {
                MESOSPHERE_ASSERT_THIS();
                this->GetStackParameters().is_in_exception_handler = false;
            }

            ALWAYS_INLINE bool IsInExceptionHandler() const {
                MESOSPHERE_ASSERT_THIS();
                return this->GetStackParameters().is_in_exception_handler;
            }

            ALWAYS_INLINE void RegisterDpc(DpcFlag flag) {
                this->GetStackParameters().dpc_flags |= flag;
            }

            ALWAYS_INLINE void ClearDpc(DpcFlag flag) {
                this->GetStackParameters().dpc_flags &= ~flag;
            }

            ALWAYS_INLINE u8 GetDpc() const {
                return this->GetStackParameters().dpc_flags;
            }

            ALWAYS_INLINE bool HasDpc() const {
                MESOSPHERE_ASSERT_THIS();
                return this->GetDpc() != 0;
            }
        private:
            void Suspend();
        public:
            constexpr KThreadContext *GetContext() { return std::addressof(this->thread_context); }
            constexpr const KThreadContext *GetContext() const { return std::addressof(this->thread_context); }
            constexpr const KAffinityMask &GetAffinityMask() const { return this->affinity_mask; }
            constexpr ThreadState GetState() const { return static_cast<ThreadState>(this->thread_state & ThreadState_Mask); }
            constexpr ThreadState GetRawState() const { return this->thread_state; }
            NOINLINE void SetState(ThreadState state);

            NOINLINE KThreadContext *GetContextForSchedulerLoop();

            constexpr s32 GetActiveCore() const { return this->core_id; }
            constexpr void SetActiveCore(s32 core) { this->core_id = core; }
            constexpr s32 GetPriority() const { return this->priority; }

            constexpr QueueEntry &GetPriorityQueueEntry(s32 core) { return this->per_core_priority_queue_entry[core]; }
            constexpr const QueueEntry &GetPriorityQueueEntry(s32 core) const { return this->per_core_priority_queue_entry[core]; }

            constexpr QueueEntry &GetSleepingQueueEntry() { return this->sleeping_queue_entry; }
            constexpr const QueueEntry &GetSleepingQueueEntry() const { return this->sleeping_queue_entry; }
            constexpr void SetSleepingQueue(KThreadQueue *q) { this->sleeping_queue = q; }

            constexpr s32 GetNumKernelWaiters() const { return this->num_kernel_waiters; }

            constexpr s64 GetLastScheduledTick() const { return this->last_scheduled_tick; }
            constexpr void SetLastScheduledTick(s64 tick) { this->last_scheduled_tick = tick; }

            constexpr KProcess *GetOwnerProcess() const { return this->parent; }
            constexpr bool IsUserThread() const { return this->parent != nullptr; }

            constexpr KProcessAddress GetThreadLocalRegionAddress() const { return this->tls_address; }
            constexpr void *GetThreadLocalRegionHeapAddress() const { return this->tls_heap_address; }
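
            /* The user and kernel preemption state fields live in the thread local region at offsets 0x100 and 0x102, respectively. */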
            constexpr u16 GetUserPreemptionState() const { return *GetPointer<u16>(this->tls_address + 0x100); }
            constexpr void SetKernelPreemptionState(u16 state) const { *GetPointer<u16>(this->tls_address + 0x100 + sizeof(u16)) = state; }

            void AddCpuTime(s64 amount) {
                this->cpu_time += amount;
            }
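
            /* A suspend flag only takes effect when its type is both requested and allowed. */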
            constexpr u32 GetSuspendFlags() const { return this->suspend_allowed_flags & this->suspend_request_flags; }
            constexpr bool IsSuspended() const { return this->GetSuspendFlags() != 0; }
            void RequestSuspend(SuspendType type);
            void TrySuspend();

            Result SetPriorityToIdle();

            Result Run();
            void Exit();

            ALWAYS_INLINE void *GetStackTop() const { return reinterpret_cast<StackParameters *>(this->kernel_stack_top) - 1; }
            ALWAYS_INLINE void *GetKernelStackTop() const { return this->kernel_stack_top; }

            /* TODO: This is kind of a placeholder definition. */

            ALWAYS_INLINE bool IsTerminationRequested() const {
                return this->termination_requested || this->GetRawState() == ThreadState_Terminated;
            }

        public:
            /* Overridden parent functions. */
            virtual bool IsInitialized() const override { return this->initialized; }
            virtual uintptr_t GetPostDestroyArgument() const override { return reinterpret_cast<uintptr_t>(this->parent) | (this->resource_limit_release_hint ? 1 : 0); }

            static void PostDestroy(uintptr_t arg);

            virtual void Finalize() override;
            virtual bool IsSignaled() const override;
            virtual void OnTimer() override;
            virtual void DoWorkerTask() override;
        public:
            static constexpr bool IsWaiterListValid() {
                return WaiterListTraits::IsValid();
            }
    };
    static_assert(alignof(KThread) == 0x10);
    static_assert(KThread::IsWaiterListValid());
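
    /* RAII scope helpers: KScopedDisableDispatch disables dispatch for the current thread for the lifetime of the scope, and KScopedEnableDispatch does the reverse. */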
    class KScopedDisableDispatch {
        public:
            explicit ALWAYS_INLINE KScopedDisableDispatch() {
                GetCurrentThread().DisableDispatch();
            }

            ALWAYS_INLINE ~KScopedDisableDispatch() {
                GetCurrentThread().EnableDispatch();
            }
    };

    class KScopedEnableDispatch {
        public:
            explicit ALWAYS_INLINE KScopedEnableDispatch() {
                GetCurrentThread().EnableDispatch();
            }

            ALWAYS_INLINE ~KScopedEnableDispatch() {
                GetCurrentThread().DisableDispatch();
            }
    };

}