Mirror of https://github.com/Atmosphere-NX/Atmosphere (synced 2024-11-09 22:56:35 +00:00)
kern: Implement exception vector ASM
Parent: e330b6187f
Commit: 919b8124dc
26 changed files with 1497 additions and 60 deletions
@@ -15,14 +15,12 @@
 */
#pragma once

#define MESOSPHERE_BUILD_FOR_AUDITING

/* All kernel code should have access to libvapours. */
#include <vapours.hpp>

/* First, pull in core macros (panic, etc). */
#include <mesosphere/kern_panic.hpp>
#include <mesosphere/kern_common.hpp>
#include <mesosphere/kern_panic.hpp>

/* Primitive types. */
#include <mesosphere/kern_k_typed_address.hpp>
@@ -43,6 +43,7 @@ namespace ams::kern::arm64::cpu {

    MESOSPHERE_CPU_DEFINE_SYSREG_ACCESSORS(VbarEl1, vbar_el1)

    MESOSPHERE_CPU_DEFINE_SYSREG_ACCESSORS(FarEl1, far_el1)
    MESOSPHERE_CPU_DEFINE_SYSREG_ACCESSORS(ParEl1, par_el1)

    MESOSPHERE_CPU_DEFINE_SYSREG_ACCESSORS(SctlrEl1, sctlr_el1)

@@ -56,6 +57,10 @@

    MESOSPHERE_CPU_DEFINE_SYSREG_ACCESSORS(TpidrRoEl0, tpidrro_el0)

    MESOSPHERE_CPU_DEFINE_SYSREG_ACCESSORS(EsrEl1, esr_el1)
    MESOSPHERE_CPU_DEFINE_SYSREG_ACCESSORS(Afsr0El1, afsr0_el1)
    MESOSPHERE_CPU_DEFINE_SYSREG_ACCESSORS(Afsr1El1, afsr1_el1)

    #define FOR_I_IN_0_TO_15(HANDLER, ...) \
        HANDLER(0, ## __VA_ARGS__) HANDLER(1, ## __VA_ARGS__) HANDLER(2, ## __VA_ARGS__) HANDLER(3, ## __VA_ARGS__) \
        HANDLER(4, ## __VA_ARGS__) HANDLER(5, ## __VA_ARGS__) HANDLER(6, ## __VA_ARGS__) HANDLER(7, ## __VA_ARGS__) \

@@ -139,6 +144,24 @@ namespace ams::kern::arm64::cpu {
            }
    };

    MESOSPHERE_CPU_SYSREG_ACCESSOR_CLASS(ArchitecturalFeatureAccessControl) {
        public:
            MESOSPHERE_CPU_SYSREG_ACCESSOR_CLASS_FUNCTIONS(ArchitecturalFeatureAccessControl, cpacr_el1)

            constexpr ALWAYS_INLINE decltype(auto) SetFpEnabled(bool en) {
                if (en) {
                    this->SetBits(20, 2, 0x3);
                } else {
                    this->SetBits(20, 2, 0x0);
                }
                return *this;
            }

            constexpr ALWAYS_INLINE bool IsFpEnabled() {
                return this->GetBits(20, 2) != 0;
            }
    };

    MESOSPHERE_CPU_SYSREG_ACCESSOR_CLASS(DebugFeature) {
        public:
            MESOSPHERE_CPU_SYSREG_ACCESSOR_CLASS_FUNCTIONS(DebugFeature, id_aa64dfr0_el1)
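As a small aside, the SetBits(20, 2, ...) and GetBits(20, 2) calls above manipulate the FPEN field of cpacr_el1, which occupies bits [21:20]. The standalone helpers below are only an illustration of that bitfield arithmetic (they are not kernel code, and the names are invented for this sketch):

#include <cstdint>

/* Illustrative bitfield helpers; FPEN == 0b11 means FP/SIMD accesses do not trap. */
constexpr std::uint64_t SetField(std::uint64_t reg, unsigned offset, unsigned count, std::uint64_t value) {
    const std::uint64_t mask = ((std::uint64_t(1) << count) - 1) << offset;
    return (reg & ~mask) | ((value << offset) & mask);
}

constexpr std::uint64_t GetField(std::uint64_t reg, unsigned offset, unsigned count) {
    return (reg >> offset) & ((std::uint64_t(1) << count) - 1);
}

static_assert(GetField(SetField(0, 20, 2, 0x3), 20, 2) == 0x3);     /* what SetFpEnabled(true) produces  */
static_assert(GetField(SetField(~0ull, 20, 2, 0x0), 20, 2) == 0x0); /* what SetFpEnabled(false) produces */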
@@ -143,6 +143,14 @@ namespace ams::kern::arm64 {
            void Initialize(s32 core_id);
            void Finalize(s32 core_id);
        public:
            u32 GetIrq() const {
                return this->gicc->iar;
            }

            static constexpr s32 ConvertRawIrq(u32 irq) {
                return (irq == 0x3FF) ? -1 : (irq & 0x3FF);
            }

            void Enable(s32 irq) const {
                this->gicd->isenabler[irq / BITSIZEOF(u32)] = (1u << (irq % BITSIZEOF(u32)));
            }
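For context, a self-contained illustration of how a caller combines GetIrq() and ConvertRawIrq(): the GIC reports 0x3FF in the IAR register when no interrupt is actually pending, and the conversion maps that spurious value to -1 so the handler can bail out early. The snippet reproduces the conversion above verbatim; the usage comments are the only addition.

#include <cstdint>

constexpr std::int32_t ConvertRawIrq(std::uint32_t irq) {
    return (irq == 0x3FF) ? -1 : static_cast<std::int32_t>(irq & 0x3FF);
}

static_assert(ConvertRawIrq(0x3FF) == -1);   /* spurious: caller returns without rescheduling */
static_assert(ConvertRawIrq(0x1E)  == 0x1E); /* a real interrupt id passes through unchanged  */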
@@ -60,6 +60,8 @@ namespace ams::kern::arm64 {
            static ALWAYS_INLINE KSpinLock &GetLock() { return s_lock; }
            static ALWAYS_INLINE KGlobalInterruptEntry &GetGlobalInterruptEntry(s32 irq) { return s_global_interrupts[KInterruptController::GetGlobalInterruptIndex(irq)]; }
            ALWAYS_INLINE KCoreLocalInterruptEntry &GetLocalInterruptEntry(s32 irq) { return this->core_local_interrupts[KInterruptController::GetLocalInterruptIndex(irq)]; }

            bool OnHandleInterrupt();
        public:
            constexpr KInterruptManager() : core_local_interrupts(), interrupt_controller(), local_state(), local_state_saved(false) { /* ... */ }
            NOINLINE void Initialize(s32 core_id);

@@ -79,6 +81,8 @@ namespace ams::kern::arm64 {
                this->interrupt_controller.SendInterProcessorInterrupt(irq);
            }

            static void HandleInterrupt(bool user_mode);

            /* Implement more KInterruptManager functionality. */
        private:
            Result BindGlobal(KInterruptHandler *handler, s32 irq, s32 core_id, s32 priority, bool manual_clear, bool level);
@@ -62,6 +62,8 @@ namespace ams::kern::arm64 {
            Result Initialize(KVirtualAddress u_pc, KVirtualAddress k_sp, KVirtualAddress u_sp, uintptr_t arg, bool is_user, bool is_64_bit, bool is_main);
            Result Finalize();

            static void FpuContextSwitchHandler(KThread *thread);

            /* TODO: More methods (especially FPU management) */
    };
@@ -0,0 +1,25 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <mesosphere/kern_common.hpp>

namespace ams::kern::arm64 {

    void UserspaceMemoryAccessFunctionAreaBegin();

    void UserspaceMemoryAccessFunctionAreaEnd();

}
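A sketch of how these two labels are intended to be used: the EL1 synchronous exception handler added later in this commit performs the equivalent comparison in assembly against elr_el1. The helper below (IsInUserspaceAccessArea) is illustrative only and not a function from the commit.

#include <cstdint>

namespace ams::kern::arm64 {

    void UserspaceMemoryAccessFunctionAreaBegin();
    void UserspaceMemoryAccessFunctionAreaEnd();

    /* Illustrative: a data abort whose faulting pc falls in [Begin, End) came from a    */
    /* userspace-access helper, so the fault can be turned into a plain "return false". */
    inline bool IsInUserspaceAccessArea(std::uintptr_t faulting_pc) {
        const auto begin = reinterpret_cast<std::uintptr_t>(&UserspaceMemoryAccessFunctionAreaBegin);
        const auto end   = reinterpret_cast<std::uintptr_t>(&UserspaceMemoryAccessFunctionAreaEnd);
        return begin <= faulting_pc && faulting_pc < end;
    }

}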
@@ -24,6 +24,10 @@ namespace ams::kern {

}

#if 1
#define MESOSPHERE_BUILD_FOR_AUDITING
#endif

#ifdef MESOSPHERE_BUILD_FOR_AUDITING
#define MESOSPHERE_BUILD_FOR_DEBUGGING
#endif
@@ -29,7 +29,7 @@ namespace ams::kern {
        KScheduler *scheduler;
        KInterruptTaskManager *interrupt_task_manager;
        s32 core_id;
        void *exception_stack_bottom;
        void *exception_stack_top;
    };
    static_assert(std::is_pod<KCurrentContext>::value);
    static_assert(sizeof(KCurrentContext) <= cpu::DataCacheLineSize);
@@ -29,9 +29,9 @@ namespace ams::kern {
        public:
            constexpr TaskQueue() : head(nullptr), tail(nullptr) { /* ... */ }

            ALWAYS_INLINE KInterruptTask *GetHead() { return this->head; }
            ALWAYS_INLINE bool IsEmpty() const { return this->head == nullptr; }
            ALWAYS_INLINE void Clear() { this->head = nullptr; this->tail = nullptr; }
            constexpr KInterruptTask *GetHead() { return this->head; }
            constexpr bool IsEmpty() const { return this->head == nullptr; }
            constexpr void Clear() { this->head = nullptr; this->tail = nullptr; }

            void Enqueue(KInterruptTask *task);
            void Dequeue();
@@ -439,8 +439,8 @@ namespace ams::kern {
            return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscIdleStack, static_cast<u32>(core_id))->GetEndAddress();
        }

        static NOINLINE KVirtualAddress GetExceptionStackBottomAddress(s32 core_id) {
            return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscExceptionStack, static_cast<u32>(core_id))->GetAddress();
        static NOINLINE KVirtualAddress GetExceptionStackTopAddress(s32 core_id) {
            return GetVirtualMemoryRegionTree().FindFirstRegionByTypeAttr(KMemoryRegionType_KernelMiscExceptionStack, static_cast<u32>(core_id))->GetEndAddress();
        }

        static NOINLINE KVirtualAddress GetSlabRegionAddress() {
@@ -32,7 +32,9 @@ namespace ams::kern {

            constexpr ALWAYS_INLINE bool Is64Bit() const { /* TODO */ return true; }

            ALWAYS_INLINE KThread *GetSuggestedTopThread(s32 core_id) { /* TODO */ return nullptr; }
            ALWAYS_INLINE KThread *GetPreemptionStatePinnedThread(s32 core_id) { /* TODO */ return nullptr; }

            void SetPreemptionState();
    };

}
@@ -74,6 +74,14 @@ namespace ams::kern {

            NOINLINE void Initialize(KThread *idle_thread);
            NOINLINE void Activate();

            ALWAYS_INLINE void RequestScheduleOnInterrupt() {
                SetSchedulerUpdateNeeded();

                if (CanSchedule()) {
                    this->ScheduleOnInterrupt();
                }
            }
        private:
            /* Static private API. */
            static ALWAYS_INLINE bool IsSchedulerUpdateNeeded() { return s_scheduler_update_needed; }

@@ -130,6 +138,11 @@
                this->ScheduleImpl();
            }

            ALWAYS_INLINE void ScheduleOnInterrupt() {
                KScopedDisableDispatch dd;
                this->Schedule();
            }

            void RescheduleOtherCores(u64 cores_needing_scheduling);

            ALWAYS_INLINE void RescheduleCurrentCore() {
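A minimal sketch of the pattern RequestScheduleOnInterrupt() above follows: record that a scheduler update is wanted, but only switch immediately when dispatch is currently allowed. The type below is illustrative and is not the kernel's KScheduler.

struct MiniScheduler {
    bool update_needed = false;
    int  disable_count = 0;   /* mirrors the per-thread disable_count that CanSchedule() consults */

    bool CanSchedule() const { return disable_count == 0; }

    void Schedule() { update_needed = false; /* ... pick and switch to the next thread ... */ }

    void RequestScheduleOnInterrupt() {
        update_needed = true;   /* always note that scheduling is wanted                         */
        if (CanSchedule()) {
            Schedule();         /* otherwise the switch happens when dispatch is re-enabled later */
        }
    }
};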
@@ -202,23 +202,57 @@ namespace ams::kern {
        public:
            ALWAYS_INLINE s32 GetDisableDispatchCount() const {
                MESOSPHERE_ASSERT_THIS();
                return GetStackParameters().disable_count;
                return this->GetStackParameters().disable_count;
            }

            ALWAYS_INLINE void DisableDispatch() {
                MESOSPHERE_ASSERT_THIS();
                MESOSPHERE_ASSERT(GetCurrentThread().GetDisableDispatchCount() >= 0);
                GetStackParameters().disable_count++;
                this->GetStackParameters().disable_count++;
            }

            ALWAYS_INLINE void EnableDispatch() {
                MESOSPHERE_ASSERT_THIS();
                MESOSPHERE_ASSERT(GetCurrentThread().GetDisableDispatchCount() > 0);
                GetStackParameters().disable_count--;
                this->GetStackParameters().disable_count--;
            }

            ALWAYS_INLINE void SetInExceptionHandler() {
                MESOSPHERE_ASSERT_THIS();
                this->GetStackParameters().is_in_exception_handler = true;
            }

            ALWAYS_INLINE void ClearInExceptionHandler() {
                MESOSPHERE_ASSERT_THIS();
                this->GetStackParameters().is_in_exception_handler = false;
            }

            ALWAYS_INLINE bool IsInExceptionHandler() const {
                MESOSPHERE_ASSERT_THIS();
                return this->GetStackParameters().is_in_exception_handler;
            }

            ALWAYS_INLINE void RegisterDpc(DpcFlag flag) {
                this->GetStackParameters().dpc_flags |= flag;
            }

            ALWAYS_INLINE void ClearDpc(DpcFlag flag) {
                this->GetStackParameters().dpc_flags &= ~flag;
            }

            ALWAYS_INLINE u8 GetDpc() const {
                return this->GetStackParameters().dpc_flags;
            }

            ALWAYS_INLINE bool HasDpc() const {
                MESOSPHERE_ASSERT_THIS();
                return this->GetDpc() != 0;
            }
        private:
            void Suspend();
        public:
            constexpr KThreadContext *GetContext() { return std::addressof(this->thread_context); }
            constexpr const KThreadContext *GetContext() const { return std::addressof(this->thread_context); }
            constexpr const KAffinityMask &GetAffinityMask() const { return this->affinity_mask; }
            constexpr ThreadState GetState() const { return static_cast<ThreadState>(this->thread_state & ThreadState_Mask); }
            constexpr ThreadState GetRawState() const { return this->thread_state; }

@@ -248,6 +282,9 @@
            constexpr KProcessAddress GetThreadLocalRegionAddress() const { return this->tls_address; }
            constexpr void *GetThreadLocalRegionHeapAddress() const { return this->tls_heap_address; }

            constexpr u16 GetUserPreemptionState() const { return *GetPointer<u16>(this->tls_address + 0x100); }
            constexpr void SetKernelPreemptionState(u16 state) const { *GetPointer<u16>(this->tls_address + 0x100 + sizeof(u16)) = state; }

            void AddCpuTime(s64 amount) {
                this->cpu_time += amount;
            }

@@ -267,14 +304,6 @@

            /* TODO: This is kind of a placeholder definition. */

            ALWAYS_INLINE bool IsInExceptionHandler() const {
                return GetStackParameters().is_in_exception_handler;
            }

            ALWAYS_INLINE void SetInExceptionHandler() {
                GetStackParameters().is_in_exception_handler = true;
            }

            ALWAYS_INLINE bool IsTerminationRequested() const {
                return this->termination_requested || this->GetRawState() == ThreadState_Terminated;
            }
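For clarity, here is a standalone sketch of the two per-thread stack-parameter mechanisms used above: a nesting counter behind DisableDispatch()/EnableDispatch() and a bitmask for the DPC flags. The names and layout below are illustrative, not the kernel's actual StackParameters definition.

struct StackParameters {
    int disable_count = 0;            /* > 0 while dispatch is disabled; calls nest          */
    unsigned char dpc_flags = 0;      /* one bit per pending DPC kind                        */
    bool is_in_exception_handler = false;
};

inline void DisableDispatch(StackParameters &sp)              { ++sp.disable_count; }
inline void EnableDispatch(StackParameters &sp)               { --sp.disable_count; }
inline bool CanSchedule(const StackParameters &sp)            { return sp.disable_count == 0; }
inline void RegisterDpc(StackParameters &sp, unsigned char f) { sp.dpc_flags |= f;  }
inline void ClearDpc(StackParameters &sp, unsigned char f)    { sp.dpc_flags &= ~f; }
inline bool HasDpc(const StackParameters &sp)                 { return sp.dpc_flags != 0; }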
@@ -25,9 +25,9 @@ namespace ams::kern {
    }

#ifdef MESOSPHERE_ENABLE_DEBUG_PRINT
#define MESOSPHERE_PANIC(...) ams::kern::Panic(__FILE__, __LINE__, __VA_ARGS__)
#define MESOSPHERE_PANIC(...) ::ams::kern::Panic(__FILE__, __LINE__, __VA_ARGS__)
#else
#define MESOSPHERE_PANIC(...) ams::kern::Panic()
#define MESOSPHERE_PANIC(...) ::ams::kern::Panic()
#endif

#ifdef MESOSPHERE_ENABLE_ASSERTIONS
@@ -15,7 +15,7 @@
 */
#pragma once
#include <vapours.hpp>
#include <mesosphere/svc/kern_svc_results.hpp>
#include <mesosphere/svc/kern_svc_prototypes.hpp>

namespace ams::kern::svc {
@@ -0,0 +1,166 @@
/* Copyright (c) 2018-2020 Atmosphère-NX (GPLv2 license header identical to the one above). */
#include <mesosphere.hpp>

namespace ams::kern::arm64 {

    namespace {

        constexpr u32 GetInstructionData(const KExceptionContext *context, u64 esr) {
            /* Check for THUMB usermode */
            if ((context->psr & 0x3F) == 0x30) {
                u32 insn = *reinterpret_cast<u16 *>(context->pc & ~0x1);
                /* Check if the instruction was 32-bit. */
                if ((esr >> 25) & 1) {
                    insn = (insn << 16) | *reinterpret_cast<u16 *>((context->pc & ~0x1) + sizeof(u16));
                }
                return insn;
            } else {
                /* Not thumb, so just get the instruction. */
                return *reinterpret_cast<u32 *>(context->pc);
            }
        }

        void HandleUserException(KExceptionContext *context, u64 esr, u64 far, u64 afsr0, u64 afsr1, u32 data) {
            KProcess *cur_process = GetCurrentProcessPointer();
            bool should_process_user_exception = KTargetSystem::IsUserExceptionHandlersEnabled();

            const u64 ec = (esr >> 26) & 0x3F;
            switch (ec) {
                case 0x0:  /* Unknown */
                case 0xE:  /* Illegal Execution State */
                case 0x11: /* SVC instruction from Aarch32 */
                case 0x15: /* SVC instruction from Aarch64 */
                case 0x22: /* PC Misalignment */
                case 0x26: /* SP Misalignment */
                case 0x2F: /* SError */
                case 0x30: /* Breakpoint from lower EL */
                case 0x32: /* SoftwareStep from lower EL */
                case 0x34: /* Watchpoint from lower EL */
                case 0x38: /* BKPT instruction */
                case 0x3C: /* BRK instruction */
                    break;
                default:
                    {
                        /* TODO: Get memory state. */
                        /* If state is KMemoryState_Code and the user can't read it, set should_process_user_exception = true; */
                    }
                    break;
            }

            if (should_process_user_exception) {
                /* TODO: Process the user exception. */
            }

            {
                /* TODO: Process for KDebug. */

                MESOSPHERE_RELEASE_LOG("Exception occurred. %016lx\n", 0ul /* TODO: cur_process->GetProgramId() */);

                /* TODO: if (!svc::ResultNotHandled::Includes(res)) { debug process } */
            }

            /* TODO: cur_process->Exit(); */
            (void)cur_process;
        }

    }

    /* NOTE: This function is called from ASM. */
    void FpuContextSwitchHandler() {
        KThreadContext::FpuContextSwitchHandler(GetCurrentThreadPointer());
    }

    /* NOTE: This function is called from ASM. */
    void HandleException(KExceptionContext *context) {
        MESOSPHERE_ASSERT(!KInterruptManager::AreInterruptsEnabled());

        /* Retrieve information about the exception. */
        const u64 esr   = cpu::GetEsrEl1();
        const u64 afsr0 = cpu::GetAfsr0El1();
        const u64 afsr1 = cpu::GetAfsr1El1();
        u64 far  = 0;
        u32 data = 0;

        /* Collect far and data based on the ec. */
        switch ((esr >> 26) & 0x3F) {
            case 0x0:  /* Unknown */
            case 0xE:  /* Illegal Execution State */
            case 0x38: /* BKPT instruction */
            case 0x3C: /* BRK instruction */
                far  = context->pc;
                data = GetInstructionData(context, esr);
                break;
            case 0x11: /* SVC instruction from Aarch32 */
                if (context->psr & 0x20) {
                    /* Thumb mode. */
                    context->pc -= 2;
                } else {
                    /* ARM mode. */
                    context->pc -= 4;
                }
                far = context->pc;
                break;
            case 0x15: /* SVC instruction from Aarch64 */
                context->pc -= 4;
                far = context->pc;
                break;
            case 0x30: /* Breakpoint from lower EL */
                far = context->pc;
                break;
            default:
                far = cpu::GetFarEl1();
                break;
        }

        /* Note that we're in an exception handler. */
        GetCurrentThread().SetInExceptionHandler();
        {
            const bool is_user_mode = (context->psr & 0xF) == 0;
            if (is_user_mode) {
                /* Handle any changes needed to the user preemption state. */
                if (GetCurrentThread().GetUserPreemptionState() != 0 && GetCurrentProcess().GetPreemptionStatePinnedThread(GetCurrentCoreId()) == nullptr) {
                    KScopedSchedulerLock lk;

                    /* Note the preemption state in process. */
                    GetCurrentProcess().SetPreemptionState();

                    /* Set the kernel preemption state flag. */
                    GetCurrentThread().SetKernelPreemptionState(1);
                }

                /* Enable interrupts while we process the usermode exception. */
                {
                    KScopedInterruptEnable ei;

                    HandleUserException(context, esr, far, afsr0, afsr1, data);
                }
            } else {
                MESOSPHERE_PANIC("Unhandled Exception in Supervisor Mode\n");
            }

            MESOSPHERE_ASSERT(!KInterruptManager::AreInterruptsEnabled());

            /* Handle any DPC requests. */
            while (GetCurrentThread().HasDpc()) {
                KDpcManager::HandleDpc();
            }
        }
        /* Note that we're no longer in an exception handler. */
        GetCurrentThread().ClearInExceptionHandler();
    }

}
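A small illustration of the exception-syndrome decoding both functions above rely on: the exception class (EC) is bits [31:26] of esr_el1. The helper below only restates the shift/mask expression used in the handlers.

#include <cstdint>

constexpr std::uint64_t GetExceptionClass(std::uint64_t esr) {
    return (esr >> 26) & 0x3F;    /* same expression as in HandleUserException/HandleException */
}

static_assert(GetExceptionClass(0x15ull << 26) == 0x15);  /* SVC instruction from Aarch64 */
static_assert(GetExceptionClass(0x3Cull << 26) == 0x3C);  /* BRK instruction              */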
@@ -0,0 +1,610 @@
/* Copyright (c) 2018-2020 Atmosphère-NX (GPLv2 license header identical to the one above). */

/* ams::kern::arm64::EL1IrqExceptionHandler() */
.section .text._ZN3ams4kern5arm6422EL1IrqExceptionHandlerEv, "ax", %progbits
.global _ZN3ams4kern5arm6422EL1IrqExceptionHandlerEv
.type _ZN3ams4kern5arm6422EL1IrqExceptionHandlerEv, %function
_ZN3ams4kern5arm6422EL1IrqExceptionHandlerEv:
    /* Save registers that need saving. */
    sub sp, sp, #(8 * 24)

    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x8, x9, [sp, #(8 * 8)]
    stp x10, x11, [sp, #(8 * 10)]
    stp x12, x13, [sp, #(8 * 12)]
    stp x14, x15, [sp, #(8 * 14)]
    stp x16, x17, [sp, #(8 * 16)]
    stp x18, x19, [sp, #(8 * 18)]
    stp x20, x21, [sp, #(8 * 20)]
    stp x22, x30, [sp, #(8 * 22)]

    mrs x19, sp_el0
    mrs x20, elr_el1
    mrs x21, spsr_el1
    mov w21, w21

    /* Invoke KInterruptManager::HandleInterrupt(bool user_mode). */
    mrs x18, tpidr_el1
    mov x0, #0
    bl _ZN3ams4kern5arm6417KInterruptManager15HandleInterruptEb

    /* Restore registers that we saved. */
    msr sp_el0, x19
    msr elr_el1, x20
    msr spsr_el1, x21

    ldp x0, x1, [sp, #(8 * 0)]
    ldp x2, x3, [sp, #(8 * 2)]
    ldp x4, x5, [sp, #(8 * 4)]
    ldp x6, x7, [sp, #(8 * 6)]
    ldp x8, x9, [sp, #(8 * 8)]
    ldp x10, x11, [sp, #(8 * 10)]
    ldp x12, x13, [sp, #(8 * 12)]
    ldp x14, x15, [sp, #(8 * 14)]
    ldp x16, x17, [sp, #(8 * 16)]
    ldp x18, x19, [sp, #(8 * 18)]
    ldp x20, x21, [sp, #(8 * 20)]
    ldp x22, x30, [sp, #(8 * 22)]

    add sp, sp, #(8 * 24)

    /* Return from the exception. */
    eret
/* ams::kern::arm64::EL0IrqExceptionHandler() */
.section .text._ZN3ams4kern5arm6422EL0IrqExceptionHandlerEv, "ax", %progbits
.global _ZN3ams4kern5arm6422EL0IrqExceptionHandlerEv
.type _ZN3ams4kern5arm6422EL0IrqExceptionHandlerEv, %function
_ZN3ams4kern5arm6422EL0IrqExceptionHandlerEv:
    /* Save registers that need saving. */
    sub sp, sp, #(8 * 36)

    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x8, x9, [sp, #(8 * 8)]
    stp x10, x11, [sp, #(8 * 10)]
    stp x12, x13, [sp, #(8 * 12)]
    stp x14, x15, [sp, #(8 * 14)]
    stp x16, x17, [sp, #(8 * 16)]
    stp x18, x19, [sp, #(8 * 18)]
    stp x20, x21, [sp, #(8 * 20)]
    stp x22, x23, [sp, #(8 * 22)]
    stp x24, x25, [sp, #(8 * 24)]
    stp x26, x27, [sp, #(8 * 26)]
    stp x28, x29, [sp, #(8 * 28)]

    mrs x20, sp_el0
    mrs x21, elr_el1
    mrs x22, spsr_el1
    mrs x23, tpidr_el0
    mov w22, w22
    stp x30, x20, [sp, #(8 * 30)]
    stp x21, x22, [sp, #(8 * 32)]
    str x23, [sp, #(8 * 34)]

    /* Invoke KInterruptManager::HandleInterrupt(bool user_mode). */
    mrs x18, tpidr_el1
    mov x0, #1
    bl _ZN3ams4kern5arm6417KInterruptManager15HandleInterruptEb

    /* Restore state from the context. */
    ldp x30, x20, [sp, #(8 * 30)]
    ldp x21, x22, [sp, #(8 * 32)]
    ldr x23, [sp, #(8 * 34)]
    msr sp_el0, x20
    msr elr_el1, x21
    msr spsr_el1, x22
    msr tpidr_el0, x23
    ldp x0, x1, [sp, #(8 * 0)]
    ldp x2, x3, [sp, #(8 * 2)]
    ldp x4, x5, [sp, #(8 * 4)]
    ldp x6, x7, [sp, #(8 * 6)]
    ldp x8, x9, [sp, #(8 * 8)]
    ldp x10, x11, [sp, #(8 * 10)]
    ldp x12, x13, [sp, #(8 * 12)]
    ldp x14, x15, [sp, #(8 * 14)]
    ldp x16, x17, [sp, #(8 * 16)]
    ldp x18, x19, [sp, #(8 * 18)]
    ldp x20, x21, [sp, #(8 * 20)]
    ldp x22, x23, [sp, #(8 * 22)]
    ldp x24, x25, [sp, #(8 * 24)]
    ldp x26, x27, [sp, #(8 * 26)]
    ldp x28, x29, [sp, #(8 * 28)]
    add sp, sp, #0x120

    /* Return from the exception. */
    eret
/* ams::kern::arm64::EL0SynchronousExceptionHandler() */
.section .text._ZN3ams4kern5arm6430EL0SynchronousExceptionHandlerEv, "ax", %progbits
.global _ZN3ams4kern5arm6430EL0SynchronousExceptionHandlerEv
.type _ZN3ams4kern5arm6430EL0SynchronousExceptionHandlerEv, %function
_ZN3ams4kern5arm6430EL0SynchronousExceptionHandlerEv:
    /* Save x16 and x17, so that we can use them as scratch. */
    stp x16, x17, [sp, #-16]!

    /* Get and parse the exception syndrome register. */
    mrs x16, esr_el1
    lsr x17, x16, #0x1a

    /* Is this an aarch32 SVC? */
    cmp x17, #0x11
    b.eq 2f

    /* Is this an aarch64 SVC? */
    cmp x17, #0x15
    b.eq 3f

    /* Is this an FPU error? */
    cmp x17, #0x7
    b.eq 4f

    /* Is this an instruction abort? */
    cmp x17, #0x21
    b.eq 5f

    /* Is this a data abort? */
    cmp x17, #0x25
    b.eq 5f

1:  /* The exception is not a data abort or instruction abort caused by a TLB conflict. */
    /* It is also not an SVC or an FPU exception. Handle it generically! */

    /* Restore x16 and x17. */
    ldp x16, x17, [sp], 16

    /* Create a KExceptionContext to pass to HandleException. */
    sub sp, sp, #0x120
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x8, x9, [sp, #(8 * 8)]
    stp x10, x11, [sp, #(8 * 10)]
    stp x12, x13, [sp, #(8 * 12)]
    stp x14, x15, [sp, #(8 * 14)]
    stp x16, x17, [sp, #(8 * 16)]
    stp x18, x19, [sp, #(8 * 18)]
    stp x20, x21, [sp, #(8 * 20)]
    stp x22, x23, [sp, #(8 * 22)]
    stp x24, x25, [sp, #(8 * 24)]
    stp x26, x27, [sp, #(8 * 26)]
    stp x28, x29, [sp, #(8 * 28)]
    mrs x20, sp_el0
    mrs x21, elr_el1
    mrs x22, spsr_el1
    mrs x23, tpidr_el0
    mov w22, w22
    stp x30, x20, [sp, #(8 * 30)]
    stp x21, x22, [sp, #(8 * 32)]
    str x23, [sp, #(8 * 34)]

    /* Call ams::kern::arm64::HandleException(ams::kern::arm64::KExceptionContext *) */
    mrs x18, tpidr_el1
    mov x0, sp
    bl _ZN3ams4kern5arm6415HandleExceptionEPNS1_17KExceptionContextE

    /* Restore state from the context. */
    ldp x30, x20, [sp, #(8 * 30)]
    ldp x21, x22, [sp, #(8 * 32)]
    ldr x23, [sp, #(8 * 34)]
    msr sp_el0, x20
    msr elr_el1, x21
    msr spsr_el1, x22
    msr tpidr_el0, x23
    ldp x0, x1, [sp, #(8 * 0)]
    ldp x2, x3, [sp, #(8 * 2)]
    ldp x4, x5, [sp, #(8 * 4)]
    ldp x6, x7, [sp, #(8 * 6)]
    ldp x8, x9, [sp, #(8 * 8)]
    ldp x10, x11, [sp, #(8 * 10)]
    ldp x12, x13, [sp, #(8 * 12)]
    ldp x14, x15, [sp, #(8 * 14)]
    ldp x16, x17, [sp, #(8 * 16)]
    ldp x18, x19, [sp, #(8 * 18)]
    ldp x20, x21, [sp, #(8 * 20)]
    ldp x22, x23, [sp, #(8 * 22)]
    ldp x24, x25, [sp, #(8 * 24)]
    ldp x26, x27, [sp, #(8 * 26)]
    ldp x28, x29, [sp, #(8 * 28)]
    add sp, sp, #0x120

    /* Return from the exception. */
    eret

2:  /* SVC from aarch32. */
    ldp x16, x17, [sp], 16
    b _ZN3ams4kern5arm6412SvcHandler32Ev

3:  /* SVC from aarch64. */
    ldp x16, x17, [sp], 16
    b _ZN3ams4kern5arm6412SvcHandler64Ev

4:  /* FPU exception. */
    ldp x16, x17, [sp], 16
    b _ZN3ams4kern5arm6425FpuAccessExceptionHandlerEv

5:  /* Check if there's a TLB conflict that caused the abort. */
    and x17, x16, #0x3F
    cmp x17, #0x30
    b.ne 1b

    /* Get the ASID in x17. */
    mrs x17, ttbr0_el1
    and x17, x17, #(0xFFFF << 48)

    /* Check if FAR is valid by examining the FnV bit. */
    tbnz x16, #10, 6f

    /* FAR is valid, so we can invalidate the address it holds. */
    mrs x16, far_el1
    lsr x16, x16, #12
    orr x17, x16, x17
    tlbi vaae1, x17
    b 7f

6:  /* There's a TLB conflict and FAR isn't valid. */
    /* Invalidate the entire TLB. */
    tlbi vmalle1

7:  /* Return from a TLB conflict. */
    /* Ensure instruction consistency. */
    dsb ish
    isb

    /* Restore x16 and x17. */
    ldp x16, x17, [sp], 16

    /* Return from the exception. */
    eret
/* ams::kern::arm64::EL1SynchronousExceptionHandler() */
.section .text._ZN3ams4kern5arm6430EL1SynchronousExceptionHandlerEv, "ax", %progbits
.global _ZN3ams4kern5arm6430EL1SynchronousExceptionHandlerEv
.type _ZN3ams4kern5arm6430EL1SynchronousExceptionHandlerEv, %function
_ZN3ams4kern5arm6430EL1SynchronousExceptionHandlerEv:
    /* Nintendo uses the "unused" virtual timer compare value as a scratch register. */
    msr cntv_cval_el0, x0

    /* Get and parse the exception syndrome register. */
    mrs x0, esr_el1
    lsr x0, x0, #0x1a

    /* Is this an instruction abort? */
    cmp x0, #0x21
    b.eq 5f

    /* Is this a data abort? */
    cmp x0, #0x25
    b.eq 5f

1:  /* The exception is not a data abort or instruction abort caused by a TLB conflict. */
    /* Load the CoreLocalContext into x0. */
    mrs x0, tpidr_el1
    cbz x0, 2f

    /* Load the exception stack top from the context. */
    ldr x0, [x0, #0x28]

    /* Setup the stack for a generic exception handle */
    sub x0, x0, #0x20
    str x1, [x0, #16]
    mov x1, sp
    str x1, [x0]
    mov sp, x0
    ldr x1, [x0, #16]
    mrs x0, cntv_cval_el0
    str x0, [sp, #8]

    /* Check again if this is a data abort from EL1. */
    mrs x0, esr_el1
    lsr x1, x0, #0x1a
    cmp x1, #0x25
    b.ne 3f

    /* Data abort. Check if it was from trying to access userspace memory. */
    mrs x1, elr_el1
    adr x0, _ZN3ams4kern5arm6438UserspaceMemoryAccessFunctionAreaBeginEv
    cmp x1, x0
    b.lo 3f
    adr x0, _ZN3ams4kern5arm6436UserspaceMemoryAccessFunctionAreaEndEv
    cmp x1, x0
    b.hs 3f

    /* We aborted trying to access userspace memory. */
    /* All functions that access user memory return a boolean for whether they succeeded. */
    /* With that in mind, we can simply restore the stack pointer and return false directly. */
    ldr x0, [sp]
    mov sp, x0

    /* Return false. */
    mov x0, #0x0
    msr elr_el1, x30
    eret

2:  /* The CoreLocalContext is nullptr. */
    /* Setup the stack for a generic exception handle. */
    /* NOTE: Nintendo does not restore X0 here, and thus saves nullptr. */
    /* This is probably not their intention, so we'll fix it. */
    /* NOTE: Nintendo also does not really save SP correctly, and so we */
    /* will also fix that. */
    mov x0, sp
    sub x0, x0, #0x20
    str x1, [x0, #16]
    mov x1, sp
    str x1, [x0]
    mov sp, x0
    mrs x0, cntv_cval_el0
    str x0, [sp, #8]

3:  /* The exception wasn't triggered by copying memory from userspace. */
    ldr x0, [sp, #8]
    ldr x1, [sp, #16]

    /* Create a KExceptionContext to pass to HandleException. */
    sub sp, sp, #0x120
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x8, x9, [sp, #(8 * 8)]
    stp x10, x11, [sp, #(8 * 10)]
    stp x12, x13, [sp, #(8 * 12)]
    stp x14, x15, [sp, #(8 * 14)]
    stp x16, x17, [sp, #(8 * 16)]
    stp x18, x19, [sp, #(8 * 18)]
    stp x20, x21, [sp, #(8 * 20)]
    stp x22, x23, [sp, #(8 * 22)]
    stp x24, x25, [sp, #(8 * 24)]
    stp x26, x27, [sp, #(8 * 26)]
    stp x28, x29, [sp, #(8 * 28)]
    mrs x20, sp_el0
    mrs x21, elr_el1
    mrs x22, spsr_el1
    mrs x23, tpidr_el0
    mov w22, w22
    stp x30, x20, [sp, #(8 * 30)]
    stp x21, x22, [sp, #(8 * 32)]
    str x23, [sp, #(8 * 34)]

    /* Call ams::kern::arm64::HandleException(ams::kern::arm64::KExceptionContext *) */
    mrs x18, tpidr_el1
    mov x0, sp
    bl _ZN3ams4kern5arm6415HandleExceptionEPNS1_17KExceptionContextE

4:  /* HandleException should never return. The best we can do is infinite loop. */
    b 4b

5:  /* Check if there's a TLB conflict that caused the abort. */
    /* NOTE: There is a Nintendo bug in this code that we correct. */
    /* Nintendo compares the low 6 bits of x0 without restoring the value. */
    /* They intend to check the DFSC/IFSC bits of esr_el1, but because they */
    /* shifted esr earlier, the check is invalid and always fails. */
    mrs x0, esr_el1
    and x0, x0, #0x3F
    cmp x0, #0x30
    b.ne 1b

    /* Check if FAR is valid by examining the FnV bit. */
    /* NOTE: Nintendo again has a bug here, the same as above. */
    /* They do not refresh the value of x0, and again compare with */
    /* the relevant bit already masked out of x0. */
    mrs x0, esr_el1
    tbnz x0, #10, 6f

    /* FAR is valid, so we can invalidate the address it holds. */
    mrs x0, far_el1
    lsr x0, x0, #12
    tlbi vaae1, x0
    b 7f

6:  /* There's a TLB conflict and FAR isn't valid. */
    /* Invalidate the entire TLB. */
    tlbi vmalle1

7:  /* Return from a TLB conflict. */
    /* Ensure instruction consistency. */
    dsb ish
    isb

    /* Restore x0 from scratch. */
    mrs x0, cntv_cval_el0

    /* Return from the exception. */
    eret
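To make the corrected checks in the notes above concrete, the same logic written out in C++ (illustrative only): the fault status code is the low six bits of esr_el1 and the value 0x30 indicates a TLB conflict abort, while bit 10 (FnV) set means far_el1 does not hold a valid faulting address.

#include <cstdint>

constexpr bool IsTlbConflictAbort(std::uint64_t esr) {
    return (esr & 0x3F) == 0x30;     /* DFSC/IFSC == TLB conflict abort                 */
}

constexpr bool FarIsValid(std::uint64_t esr) {
    return ((esr >> 10) & 1) == 0;   /* FnV clear => far_el1 is usable for "tlbi vaae1" */
}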
/* ams::kern::arm64::FpuAccessExceptionHandler() */
.section .text._ZN3ams4kern5arm6425FpuAccessExceptionHandlerEv, "ax", %progbits
.global _ZN3ams4kern5arm6425FpuAccessExceptionHandlerEv
.type _ZN3ams4kern5arm6425FpuAccessExceptionHandlerEv, %function
_ZN3ams4kern5arm6425FpuAccessExceptionHandlerEv:
    /* Save registers that need saving. */
    sub sp, sp, #(8 * 24)

    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x8, x9, [sp, #(8 * 8)]
    stp x10, x11, [sp, #(8 * 10)]
    stp x12, x13, [sp, #(8 * 12)]
    stp x14, x15, [sp, #(8 * 14)]
    stp x16, x17, [sp, #(8 * 16)]
    stp x18, x19, [sp, #(8 * 18)]
    stp x20, x21, [sp, #(8 * 20)]
    stp x22, x30, [sp, #(8 * 22)]

    mrs x18, tpidr_el1
    mrs x19, sp_el0
    mrs x20, elr_el1
    mrs x21, spsr_el1

    /* Invoke the FPU context switch handler. */
    bl _ZN3ams4kern5arm6423FpuContextSwitchHandlerEv

    /* Restore registers that we saved. */
    msr sp_el0, x19
    msr elr_el1, x20
    msr spsr_el1, x21

    ldp x0, x1, [sp, #(8 * 0)]
    ldp x2, x3, [sp, #(8 * 2)]
    ldp x4, x5, [sp, #(8 * 4)]
    ldp x6, x7, [sp, #(8 * 6)]
    ldp x8, x9, [sp, #(8 * 8)]
    ldp x10, x11, [sp, #(8 * 10)]
    ldp x12, x13, [sp, #(8 * 12)]
    ldp x14, x15, [sp, #(8 * 14)]
    ldp x16, x17, [sp, #(8 * 16)]
    ldp x18, x19, [sp, #(8 * 18)]
    ldp x20, x21, [sp, #(8 * 20)]
    ldp x22, x30, [sp, #(8 * 22)]

    add sp, sp, #(8 * 24)

    /* Return from the exception. */
    eret
/* ams::kern::arm64::EL1SystemErrorHandler() */
.section .text._ZN3ams4kern5arm6421EL1SystemErrorHandlerEv, "ax", %progbits
.global _ZN3ams4kern5arm6421EL1SystemErrorHandlerEv
.type _ZN3ams4kern5arm6421EL1SystemErrorHandlerEv, %function
_ZN3ams4kern5arm6421EL1SystemErrorHandlerEv:
    /* Nintendo uses the "unused" virtual timer compare value as a scratch register. */
    msr cntv_cval_el0, x0

    /* Load the exception stack top from the context. */
    ldr x0, [x0, #0x28]

    /* Setup the stack for a generic exception handle */
    sub x0, x0, #0x20
    str x1, [x0, #16]
    mov x1, sp
    str x1, [x0]
    mov sp, x0
    ldr x1, [x0, #16]
    mrs x0, cntv_cval_el0
    str x0, [sp, #8]

    /* Create a KExceptionContext to pass to HandleException. */
    sub sp, sp, #0x120
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x8, x9, [sp, #(8 * 8)]
    stp x10, x11, [sp, #(8 * 10)]
    stp x12, x13, [sp, #(8 * 12)]
    stp x14, x15, [sp, #(8 * 14)]
    stp x16, x17, [sp, #(8 * 16)]
    stp x18, x19, [sp, #(8 * 18)]
    stp x20, x21, [sp, #(8 * 20)]
    stp x22, x23, [sp, #(8 * 22)]
    stp x24, x25, [sp, #(8 * 24)]
    stp x26, x27, [sp, #(8 * 26)]
    stp x28, x29, [sp, #(8 * 28)]
    mrs x20, sp_el0
    mrs x21, elr_el1
    mrs x22, spsr_el1
    mrs x23, tpidr_el0
    mov w22, w22
    stp x30, x20, [sp, #(8 * 30)]
    stp x21, x22, [sp, #(8 * 32)]
    str x23, [sp, #(8 * 34)]

    /* Invoke ams::kern::arm64::HandleException(ams::kern::arm64::KExceptionContext *). */
    mrs x18, tpidr_el1
    mov x0, sp
    bl _ZN3ams4kern5arm6415HandleExceptionEPNS1_17KExceptionContextE

1:  /* HandleException should never return. The best we can do is infinite loop. */
    b 1b

    /* Return from the exception. */
    eret
/* ams::kern::arm64::EL0SystemErrorHandler() */
.section .text._ZN3ams4kern5arm6421EL0SystemErrorHandlerEv, "ax", %progbits
.global _ZN3ams4kern5arm6421EL0SystemErrorHandlerEv
.type _ZN3ams4kern5arm6421EL0SystemErrorHandlerEv, %function
_ZN3ams4kern5arm6421EL0SystemErrorHandlerEv:
    /* Create a KExceptionContext to pass to HandleException. */
    sub sp, sp, #0x120
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x8, x9, [sp, #(8 * 8)]
    stp x10, x11, [sp, #(8 * 10)]
    stp x12, x13, [sp, #(8 * 12)]
    stp x14, x15, [sp, #(8 * 14)]
    stp x16, x17, [sp, #(8 * 16)]
    stp x18, x19, [sp, #(8 * 18)]
    stp x20, x21, [sp, #(8 * 20)]
    stp x22, x23, [sp, #(8 * 22)]
    stp x24, x25, [sp, #(8 * 24)]
    stp x26, x27, [sp, #(8 * 26)]
    stp x28, x29, [sp, #(8 * 28)]
    mrs x20, sp_el0
    mrs x21, elr_el1
    mrs x22, spsr_el1
    mrs x23, tpidr_el0
    mov w22, w22
    stp x30, x20, [sp, #(8 * 30)]
    stp x21, x22, [sp, #(8 * 32)]
    str x23, [sp, #(8 * 34)]

    /* Invoke ams::kern::arm64::HandleException(ams::kern::arm64::KExceptionContext *). */
    mrs x18, tpidr_el1
    mov x0, sp
    bl _ZN3ams4kern5arm6415HandleExceptionEPNS1_17KExceptionContextE

    /* Restore state from the context. */
    ldp x30, x20, [sp, #(8 * 30)]
    ldp x21, x22, [sp, #(8 * 32)]
    ldr x23, [sp, #(8 * 34)]
    msr sp_el0, x20
    msr elr_el1, x21
    msr spsr_el1, x22
    msr tpidr_el0, x23
    ldp x0, x1, [sp, #(8 * 0)]
    ldp x2, x3, [sp, #(8 * 2)]
    ldp x4, x5, [sp, #(8 * 4)]
    ldp x6, x7, [sp, #(8 * 6)]
    ldp x8, x9, [sp, #(8 * 8)]
    ldp x10, x11, [sp, #(8 * 10)]
    ldp x12, x13, [sp, #(8 * 12)]
    ldp x14, x15, [sp, #(8 * 14)]
    ldp x16, x17, [sp, #(8 * 16)]
    ldp x18, x19, [sp, #(8 * 18)]
    ldp x20, x21, [sp, #(8 * 20)]
    ldp x22, x23, [sp, #(8 * 22)]
    ldp x24, x25, [sp, #(8 * 24)]
    ldp x26, x27, [sp, #(8 * 26)]
    ldp x28, x29, [sp, #(8 * 28)]
    add sp, sp, #0x120

    /* Return from the exception. */
    eret
@@ -31,6 +31,101 @@ namespace ams::kern::arm64 {
        this->interrupt_controller.Finalize(core_id);
    }

    bool KInterruptManager::OnHandleInterrupt() {
        /* Get the interrupt id. */
        const u32 raw_irq = this->interrupt_controller.GetIrq();
        const s32 irq = KInterruptController::ConvertRawIrq(raw_irq);

        /* If the IRQ is spurious, we don't need to reschedule. */
        if (irq < 0) {
            return false;
        }

        KInterruptTask *task = nullptr;
        if (KInterruptController::IsLocal(irq)) {
            /* Get local interrupt entry. */
            auto &entry = GetLocalInterruptEntry(irq);
            if (entry.handler != nullptr) {
                /* Set manual clear needed if relevant. */
                if (entry.manually_cleared) {
                    this->interrupt_controller.Disable(irq);
                    entry.needs_clear = true;
                }

                /* Set the handler. */
                task = entry.handler->OnInterrupt(irq);
            } else {
                MESOSPHERE_LOG("Core%d: Unhandled local interrupt %d\n", GetCurrentCoreId(), irq);
            }
        } else if (KInterruptController::IsGlobal(irq)) {
            KScopedSpinLock lk(GetLock());

            /* Get global interrupt entry. */
            auto &entry = GetGlobalInterruptEntry(irq);
            if (entry.handler != nullptr) {
                /* Set manual clear needed if relevant. */
                if (entry.manually_cleared) {
                    this->interrupt_controller.Disable(irq);
                    entry.needs_clear = true;
                }

                /* Set the handler. */
                task = entry.handler->OnInterrupt(irq);
            } else {
                MESOSPHERE_LOG("Core%d: Unhandled global interrupt %d\n", GetCurrentCoreId(), irq);
            }
        } else {
            MESOSPHERE_LOG("Invalid interrupt %d\n", irq);
        }

        /* If we found no task, then we don't need to reschedule. */
        if (task == nullptr) {
            return false;
        }

        /* If the task isn't the dummy task, we should add it to the queue. */
        if (task != GetDummyInterruptTask()) {
            /* TODO: Kernel::GetInterruptTaskManager().Enqueue(task); */
        }

        return true;
    }

    void KInterruptManager::HandleInterrupt(bool user_mode) {
        /* On interrupt, call OnHandleInterrupt() to determine if we need rescheduling and handle. */
        const bool needs_scheduling = Kernel::GetInterruptManager().OnHandleInterrupt();

        /* If we need scheduling, */
        if (needs_scheduling) {
            /* Handle any changes needed to the user preemption state. */
            if (user_mode && GetCurrentThread().GetUserPreemptionState() != 0 && GetCurrentProcess().GetPreemptionStatePinnedThread(GetCurrentCoreId()) == nullptr) {
                KScopedSchedulerLock sl;

                /* Note the preemption state in process. */
                GetCurrentProcess().SetPreemptionState();

                /* Set the kernel preemption state flag. */
                GetCurrentThread().SetKernelPreemptionState(1);

                /* Request interrupt scheduling. */
                Kernel::GetScheduler().RequestScheduleOnInterrupt();
            } else {
                /* Request interrupt scheduling. */
                Kernel::GetScheduler().RequestScheduleOnInterrupt();
            }
        }

        /* If user mode, check if the thread needs termination. */
        /* If it does, we can take advantage of this to terminate it. */
        if (user_mode) {
            KThread *cur_thread = GetCurrentThreadPointer();
            if (cur_thread->IsTerminationRequested()) {
                KScopedInterruptEnable ei;
                cur_thread->Exit();
            }
        }
    }

    Result KInterruptManager::BindHandler(KInterruptHandler *handler, s32 irq, s32 core_id, s32 priority, bool manual_clear, bool level) {
        R_UNLESS(KInterruptController::IsGlobal(irq) || KInterruptController::IsLocal(irq), svc::ResultOutOfRange());
@@ -27,6 +27,15 @@ namespace ams::kern::arm64 {

    namespace {

        ALWAYS_INLINE bool IsFpuEnabled() {
            return cpu::ArchitecturalFeatureAccessControlRegisterAccessor().IsFpEnabled();
        }

        ALWAYS_INLINE void EnableFpu() {
            cpu::ArchitecturalFeatureAccessControlRegisterAccessor().SetFpEnabled(true).Store();
            cpu::InstructionMemoryBarrier();
        }

        uintptr_t SetupStackForUserModeThreadStarter(KVirtualAddress pc, KVirtualAddress k_sp, KVirtualAddress u_sp, uintptr_t arg, bool is_64_bit) {
            /* NOTE: Stack layout on entry looks like following: */
            /* SP */

@@ -128,4 +137,21 @@ namespace ams::kern::arm64 {
        return ResultSuccess();
    }

    void KThreadContext::FpuContextSwitchHandler(KThread *thread) {
        MESOSPHERE_ASSERT(!KInterruptManager::AreInterruptsEnabled());
        MESOSPHERE_ASSERT(!IsFpuEnabled());

        /* Enable the FPU. */
        EnableFpu();

        /* Restore the FPU registers. */
        KProcess *process = thread->GetOwnerProcess();
        MESOSPHERE_ASSERT(process != nullptr);
        if (process->Is64Bit()) {
            RestoreFpuRegisters64(*thread->GetContext());
        } else {
            RestoreFpuRegisters32(*thread->GetContext());
        }
    }

}
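A brief sketch of the lazy-FPU flow this file and the FPU access exception handler implement, written as illustrative C++ rather than the kernel's actual types: the FPU is left disabled across a context switch, the first FP/SIMD instruction of the incoming thread traps, and only then are that thread's FPU registers restored.

#include <cstdio>

static bool fpu_enabled = false;   /* stands in for cpacr_el1.FPEN in this toy model */

static void RestoreFpuRegistersFor(const char *thread_name) {
    std::printf("restoring FPU registers of %s\n", thread_name);
}

/* Invoked when a thread executes its first FP/SIMD instruction after a switch. */
static void FpuAccessTrap(const char *current_thread) {
    fpu_enabled = true;                     /* corresponds to EnableFpu()               */
    RestoreFpuRegistersFor(current_thread); /* corresponds to RestoreFpuRegisters64/32  */
}

int main() {
    fpu_enabled = false;          /* context switch: skip the eager restore            */
    FpuAccessTrap("thread A");    /* later, the thread touches the FPU and traps       */
}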
@@ -0,0 +1,35 @@
/* Copyright (c) 2018-2020 Atmosphère-NX (GPLv2 license header identical to the one above). */

/* ams::kern::arm64::UserspaceMemoryAccessFunctionAreaBegin() */
.section .text._ZN3ams4kern5arm6438UserspaceMemoryAccessFunctionAreaBeginEv, "ax", %progbits
.global _ZN3ams4kern5arm6438UserspaceMemoryAccessFunctionAreaBeginEv
.type _ZN3ams4kern5arm6438UserspaceMemoryAccessFunctionAreaBeginEv, %function
_ZN3ams4kern5arm6438UserspaceMemoryAccessFunctionAreaBeginEv:
    /* NOTE: This is not a real function, and only exists as a label for safety. */

/* ================ All Userspace Memory Functions after this line. ================ */

    /* TODO */

/* ================ All Userspace Memory Functions before this line. ================ */

/* ams::kern::arm64::UserspaceMemoryAccessFunctionAreaEnd() */
.section .text._ZN3ams4kern5arm6436UserspaceMemoryAccessFunctionAreaEndEv, "ax", %progbits
.global _ZN3ams4kern5arm6436UserspaceMemoryAccessFunctionAreaEndEv
.type _ZN3ams4kern5arm6436UserspaceMemoryAccessFunctionAreaEndEv, %function
_ZN3ams4kern5arm6436UserspaceMemoryAccessFunctionAreaEndEv:
    /* NOTE: This is not a real function, and only exists as a label for safety. */
@@ -0,0 +1,343 @@
/* Copyright (c) 2018-2020 Atmosphère-NX (GPLv2 license header identical to the one above). */

/* ams::kern::arm64::SvcHandler64() */
.section .text._ZN3ams4kern5arm6412SvcHandler64Ev, "ax", %progbits
.global _ZN3ams4kern5arm6412SvcHandler64Ev
.type _ZN3ams4kern5arm6412SvcHandler64Ev, %function
_ZN3ams4kern5arm6412SvcHandler64Ev:
    /* Create a KExceptionContext for the exception. */
    sub sp, sp, #0x120

    /* Save registers needed for ReturnFromException */
    stp x9, x10, [sp, #(8 * 9)]
    str x11, [sp, #(8 * 11)]
    str x18, [sp, #(8 * 18)]

    mrs x8, sp_el0
    mrs x9, elr_el1
    mrs x10, spsr_el1
    mrs x11, tpidr_el0

    /* Save callee-saved registers. */
    stp x19, x20, [sp, #(8 * 19)]
    stp x21, x22, [sp, #(8 * 21)]
    stp x23, x24, [sp, #(8 * 23)]
    stp x25, x26, [sp, #(8 * 25)]
    stp x27, x28, [sp, #(8 * 27)]

    /* Save miscellaneous registers. */
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x29, x30, [sp, #(8 * 29)]
    stp x8, x9, [sp, #(8 * 31)]
    stp x10, x11, [sp, #(8 * 33)]

    /* Check if the SVC index is out of range. */
    mrs x8, esr_el1
    and x8, x8, #0xFF
    cmp x8, #0x80
    b.ge 3f

    /* Check the specific SVC permission bit for allowance. */
    mov x9, sp
    add x9, x9, x8, lsr#3
    ldrb w9, [x9, #0x120]
    and x10, x8, #0x7
    lsr x10, x9, x10
    tst x10, #1
    b.eq 3f

    /* Check if our preemption state allows us to call SVCs. */
    mrs x10, tpidrro_el0
    ldrh w10, [x10, #0x100]
    cbz w10, 1f

    /* It might not, so check the stack params to see if we must not allow the SVC. */
    ldrb w10, [sp, #(0x120 + 0x14)]
    cbz w10, 3f

1:  /* We can call the SVC. */
    adr x10, _ZN3ams4kern3svc10SvcTable64E
    ldr x11, [x10, x8, lsl#3]
    cbz x11, 3f

    /* Note that we're calling the SVC. */
    mov w10, #1
    strb w10, [sp, #(0x120 + 0x12)]
    strb w8, [sp, #(0x120 + 0x11)]

    /* Invoke the SVC handler. */
    mrs x18, tpidr_el1
    msr daifclr, #2
    blr x11
    msr daifset, #2

2:  /* We completed the SVC, and we should handle DPC. */
    /* Check the dpc flags. */
    ldrb w8, [sp, #(0x120 + 0x10)]
    cbz w8, 4f

    /* We have DPC to do! */
    /* Save registers and call ams::kern::KDpcManager::HandleDpc(). */
    sub sp, sp, #0x40
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    bl _ZN3ams4kern11KDpcManager9HandleDpcEv
    ldp x0, x1, [sp, #(8 * 0)]
    ldp x2, x3, [sp, #(8 * 2)]
    ldp x4, x5, [sp, #(8 * 4)]
    ldp x6, x7, [sp, #(8 * 6)]
    add sp, sp, #0x40
    b 2b

3:  /* Invalid SVC. */
    /* Setup the context to call into HandleException. */
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp xzr, xzr, [sp, #(8 * 8)]
    stp xzr, xzr, [sp, #(8 * 10)]
    stp xzr, xzr, [sp, #(8 * 12)]
    stp xzr, xzr, [sp, #(8 * 14)]
    stp xzr, xzr, [sp, #(8 * 16)]
    stp xzr, x19, [sp, #(8 * 18)]
    stp x20, x21, [sp, #(8 * 20)]
    stp x22, x23, [sp, #(8 * 22)]
    stp x24, x25, [sp, #(8 * 24)]
    stp x26, x27, [sp, #(8 * 26)]
    stp x28, x29, [sp, #(8 * 28)]

    /* Call ams::kern::arm64::HandleException(ams::kern::arm64::KExceptionContext *) */
    mrs x18, tpidr_el1
    mov x0, sp
    bl _ZN3ams4kern5arm6415HandleExceptionEPNS1_17KExceptionContextE

    /* Restore registers. */
    ldp x30, x8, [sp, #(8 * 30)]
    ldp x9, x10, [sp, #(8 * 32)]
    ldr x11, [sp, #(8 * 34)]
    msr sp_el0, x8
    msr elr_el1, x9
    msr spsr_el1, x10
    msr tpidr_el0, x11
    ldp x0, x1, [sp, #(8 * 0)]
    ldp x2, x3, [sp, #(8 * 2)]
    ldp x4, x5, [sp, #(8 * 4)]
    ldp x6, x7, [sp, #(8 * 6)]
    ldp x8, x9, [sp, #(8 * 8)]
    ldp x10, x11, [sp, #(8 * 10)]
    ldp x12, x13, [sp, #(8 * 12)]
    ldp x14, x15, [sp, #(8 * 14)]
    ldp x16, x17, [sp, #(8 * 16)]
    ldp x18, x19, [sp, #(8 * 18)]
    ldp x20, x21, [sp, #(8 * 20)]
    ldp x22, x23, [sp, #(8 * 22)]
    ldp x24, x25, [sp, #(8 * 24)]
    ldp x26, x27, [sp, #(8 * 26)]
    ldp x28, x29, [sp, #(8 * 28)]

    /* Return. */
    add sp, sp, #0x120
    eret

4:  /* Return from SVC. */
    /* Clear our in-SVC note. */
    strb wzr, [sp, #(0x120 + 0x12)]

    /* Restore registers. */
    ldp x30, x8, [sp, #(8 * 30)]
    ldp x9, x10, [sp, #(8 * 32)]
    ldr x11, [sp, #(8 * 34)]
    msr sp_el0, x8
    msr elr_el1, x9
    msr spsr_el1, x10
    msr tpidr_el0, x11

    /* Clear registers. */
    mov x8, xzr
    mov x9, xzr
    mov x10, xzr
    mov x11, xzr
    mov x12, xzr
    mov x13, xzr
    mov x14, xzr
    mov x15, xzr
    mov x16, xzr
    mov x17, xzr
    mov x18, xzr

    /* Return. */
    add sp, sp, #0x120
    eret
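The permission test above (the ldrb/lsr/tst sequence against the byte at [sp + 0x120 + id/8]) is a bitmap lookup. A standalone C++ rendering of the same check, with an assumed packed 0x80-bit bitmap, looks like this; it is illustrative, not the kernel's code.

#include <cstdint>

/* svc_permissions is assumed to be a 16-byte bitmap: one bit per SVC id, 0x80 ids total. */
constexpr bool IsSvcAllowed(const std::uint8_t *svc_permissions, unsigned svc_id) {
    if (svc_id >= 0x80) {
        return false;                                    /* out-of-range ids are rejected, as in "cmp x8, #0x80" */
    }
    return (svc_permissions[svc_id / 8] >> (svc_id % 8)) & 1;
}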
/* ams::kern::arm64::SvcHandler32() */
|
||||
.section .text._ZN3ams4kern5arm6412SvcHandler32Ev, "ax", %progbits
|
||||
.global _ZN3ams4kern5arm6412SvcHandler32Ev
|
||||
.type _ZN3ams4kern5arm6412SvcHandler32Ev, %function
|
||||
_ZN3ams4kern5arm6412SvcHandler32Ev:
|
||||
/* Ensure that our registers are 32-bit. */
|
||||
mov w0, w0
|
||||
mov w1, w1
|
||||
mov w2, w2
|
||||
mov w3, w3
|
||||
mov w4, w4
|
||||
mov w5, w5
|
||||
mov w6, w6
|
||||
mov w7, w7
|
||||
|
||||
/* Create a KExceptionContext for the exception. */
|
||||
sub sp, sp, #0x120
|
||||
|
||||
/* Save system registers */
|
||||
mrs x17, elr_el1
|
||||
mrs x20, spsr_el1
|
||||
mrs x19, tpidr_el0
    stp x17, x20, [sp, #(8 * 32)]
    str x19, [sp, #(8 * 34)]

    /* Save registers. */
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp x8, x9, [sp, #(8 * 8)]
    stp x10, x11, [sp, #(8 * 10)]
    stp x12, x13, [sp, #(8 * 12)]
    stp x14, xzr, [sp, #(8 * 14)]

    /* Check if the SVC index is out of range. */
    mrs x16, esr_el1
    and x16, x16, #0xFF
    cmp x16, #0x80
    b.ge 3f

    /* Check the specific SVC permission bit for allowance. */
    mov x20, sp
    add x20, x20, x16, lsr#3
    ldrb w20, [x20, #0x120]
    and x17, x16, #0x7
    lsr x17, x20, x17
    tst x17, #1
    b.eq 3f

    /* Check if our preemption state allows us to call SVCs. */
    mrs x15, tpidrro_el0
    ldrh w15, [x15, #0x100]
    cbz w15, 1f

    /* It might not, so check the stack params to see if we must not allow the SVC. */
    ldrb w15, [sp, #(0x120 + 0x14)]
    cbz w15, 3f

1:  /* We can call the SVC. */
    adr x15, _ZN3ams4kern3svc16SvcTable64From32E
    ldr x19, [x15, x16, lsl#3]
    cbz x19, 3f

    /* Note that we're calling the SVC. */
    mov w15, #1
    strb w15, [sp, #(0x120 + 0x12)]
    strb w16, [sp, #(0x120 + 0x11)]

    /* Invoke the SVC handler. */
    mrs x18, tpidr_el1
    msr daifclr, #2
    blr x19
    msr daifset, #2

2:  /* We completed the SVC, and we should handle DPC. */
    /* Check the dpc flags. */
    ldrb w16, [sp, #(0x120 + 0x10)]
    cbz w16, 4f

    /* We have DPC to do! */
    /* Save registers and call ams::kern::KDpcManager::HandleDpc(). */
    sub sp, sp, #0x20
    stp w0, w1, [sp, #(4 * 0)]
    stp w2, w3, [sp, #(4 * 2)]
    stp w4, w5, [sp, #(4 * 4)]
    stp w6, w7, [sp, #(4 * 6)]
    bl _ZN3ams4kern11KDpcManager9HandleDpcEv
    ldp w0, w1, [sp, #(4 * 0)]
    ldp w2, w3, [sp, #(4 * 2)]
    ldp w4, w5, [sp, #(4 * 4)]
    ldp w6, w7, [sp, #(4 * 6)]
    add sp, sp, #0x20
    b 2b

3:  /* Invalid SVC. */
    /* Setup the context to call into HandleException. */
    stp x0, x1, [sp, #(8 * 0)]
    stp x2, x3, [sp, #(8 * 2)]
    stp x4, x5, [sp, #(8 * 4)]
    stp x6, x7, [sp, #(8 * 6)]
    stp xzr, xzr, [sp, #(8 * 16)]
    stp xzr, xzr, [sp, #(8 * 18)]
    stp xzr, xzr, [sp, #(8 * 20)]
    stp xzr, xzr, [sp, #(8 * 22)]
    stp xzr, xzr, [sp, #(8 * 24)]
    stp xzr, xzr, [sp, #(8 * 26)]
    stp xzr, xzr, [sp, #(8 * 28)]
    stp xzr, xzr, [sp, #(8 * 30)]

    /* Call ams::kern::arm64::HandleException(ams::kern::arm64::KExceptionContext *) */
    mrs x18, tpidr_el1
    mov x0, sp
    bl _ZN3ams4kern5arm6415HandleExceptionEPNS1_17KExceptionContextE

    /* Restore registers. */
    ldp x17, x20, [sp, #(8 * 32)]
    ldr x19, [sp, #(8 * 34)]
    msr elr_el1, x17
    msr spsr_el1, x20
    msr tpidr_el0, x19
    ldp x0, x1, [sp, #(8 * 0)]
    ldp x2, x3, [sp, #(8 * 2)]
    ldp x4, x5, [sp, #(8 * 4)]
    ldp x6, x7, [sp, #(8 * 6)]
    ldp x8, x9, [sp, #(8 * 8)]
    ldp x10, x11, [sp, #(8 * 10)]
    ldp x12, x13, [sp, #(8 * 12)]
    ldp x14, x15, [sp, #(8 * 14)]

    /* Return. */
    add sp, sp, #0x120
    eret

4:  /* Return from SVC. */
    /* Clear our in-SVC note. */
    strb wzr, [sp, #(0x120 + 0x12)]

    /* Restore registers. */
    ldp x8, x9, [sp, #(8 * 8)]
    ldp x10, x11, [sp, #(8 * 10)]
    ldp x12, x13, [sp, #(8 * 12)]
    ldp x14, xzr, [sp, #(8 * 14)]
    ldp x17, x20, [sp, #(8 * 32)]
    ldr x19, [sp, #(8 * 34)]
    msr elr_el1, x17
    msr spsr_el1, x20
    msr tpidr_el0, x19

    /* Return. */
    add sp, sp, #0x120
    eret
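
For reference, the permission check above indexes a per-thread SVC-access bitmap held in the thread's stack parameters: the SVC id's upper bits select a byte at offset 0x120 from the handler's stack frame, and the low three bits select a bit inside that byte. A minimal C++ sketch of the same test follows; the structure name and field layout are assumptions made for illustration, not taken from this diff.

#include <cstdint>

/* Hypothetical mirror of the layout the assembly implies: a 0x80-entry */
/* bitmap of allowed SVC ids (one bit per SVC id, 16 bytes total).      */
struct HypotheticalStackParameters {
    uint8_t svc_permission[0x80 / 8];
    /* ... other per-thread state (dpc flags, in-SVC note, etc.) ... */
};

bool IsSvcAllowed(const HypotheticalStackParameters &params, uint32_t svc_id) {
    if (svc_id >= 0x80) {
        return false;                                        /* cmp x16, #0x80; b.ge 3f */
    }
    const uint8_t byte = params.svc_permission[svc_id >> 3]; /* corresponds to ldrb w20, [x20, #0x120] */
    return ((byte >> (svc_id & 7)) & 1) != 0;                /* lsr + tst #1 */
}
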
@@ -13,15 +13,16 @@
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <mesosphere.hpp>
#include <mesosphere/kern_debug_log.hpp>
#include <mesosphere/svc/kern_svc_tables.hpp>
#include <vapours/svc/svc_codegen.hpp>

/* TODO: Enable compilation of this file when the kernel supports supervisor calls. */
#if 0
namespace ams::kern::svc {

    namespace {

        /* TODO: Enable compilation of this file when the kernel supports supervisor calls. */
        #if 0
        #define DECLARE_SVC_STRUCT(ID, RETURN_TYPE, NAME, ...) \
            class NAME { \
                private: \

@@ -30,7 +31,14 @@ namespace ams::kern::svc {
                    static NOINLINE void Call64() { return Impl::Call64(); } \
                    static NOINLINE void Call64From32() { return Impl::Call64From32(); } \
            };

        #else
        #define DECLARE_SVC_STRUCT(ID, RETURN_TYPE, NAME, ...) \
            class NAME { \
                public: \
                    static NOINLINE void Call64() { MESOSPHERE_LOG("Stubbed Svc"#NAME"64 was called\n"); } \
                    static NOINLINE void Call64From32() { MESOSPHERE_LOG("Stubbed Svc"#NAME"64From32 was called\n"); } \
            };
        #endif


        /* Set omit-frame-pointer to prevent GCC from emitting MOV X29, SP instructions. */

@@ -66,4 +74,3 @@ namespace ams::kern::svc {
        }();

    }
#endif
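
The SVC handler assembly dispatches through _ZN3ams4kern3svc16SvcTable64From32E (ams::kern::svc::SvcTable64From32) by loading the entry at index svc_id * 8 and treating a null entry as an invalid SVC. A rough C++ sketch of that lookup follows; the element type and table size are inferred from the assembly and from the no-argument stubs above, not stated verbatim in this diff.

#include <cstdint>

namespace ams::kern::svc {

    using SvcHandler = void (*)();   /* matches the stubbed Call64From32 handlers; an assumption */

    /* Assumed shape of the table the assembly indexes: one entry per SVC id, null = not implemented. */
    extern const SvcHandler SvcTable64From32[0x80];

}

/* Mirrors the assembly: adr x15, <table>; ldr x19, [x15, x16, lsl#3]; cbz x19, 3f. */
inline ams::kern::svc::SvcHandler LookupSvc64From32(uint32_t svc_id) {
    return (svc_id < 0x80) ? ams::kern::svc::SvcTable64From32[svc_id] : nullptr;
}
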
24 libraries/libmesosphere/source/kern_k_process.cpp Normal file
@@ -0,0 +1,24 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <mesosphere.hpp>

namespace ams::kern {

    void KProcess::SetPreemptionState() {
        /* TODO */
    }

}

@@ -114,11 +114,11 @@ namespace ams::kern {
        for (size_t core_id = 0; core_id < cpu::NumCores; core_id++) {
            KThread *top_thread = priority_queue.GetScheduledFront(core_id);
            if (top_thread != nullptr) {
                /* If the thread has no waiters, we might prefer a suggestion from the owner process to it. */
                /* If the thread has no waiters, we need to check if the process has a thread pinned by PreemptionState. */
                if (top_thread->GetNumKernelWaiters() == 0) {
                    if (KProcess *parent = top_thread->GetOwnerProcess(); parent != nullptr) {
                        if (KThread *suggested = parent->GetSuggestedTopThread(core_id); suggested != nullptr && suggested != top_thread) {
                            /* We prefer our parent's suggestion whenever possible. However, we also don't want to schedule un-runnable threads. */
                        if (KThread *suggested = parent->GetPreemptionStatePinnedThread(core_id); suggested != nullptr && suggested != top_thread) {
                            /* We prefer our parent's pinned thread whenever possible. However, we also don't want to schedule un-runnable threads. */
                            if (suggested->GetRawState() == KThread::ThreadState_Runnable) {
                                top_thread = suggested;
                            } else {

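Condensed for readability, the new selection rule in the hunk above can be sketched as a standalone helper. This is only a paraphrase of the visible lines; the truncated else branch is not represented, and the helper is not part of the real KScheduler interface.

#include <mesosphere.hpp>

namespace ams::kern {

    /* Prefer the owner process's PreemptionState-pinned thread for this core when the */
    /* scheduled front thread has no kernel waiters and the pinned thread is runnable. */
    KThread *SelectTopThreadSketch(KThread *top_thread, s32 core_id) {
        if (top_thread == nullptr || top_thread->GetNumKernelWaiters() != 0) {
            return top_thread;
        }
        KProcess *parent = top_thread->GetOwnerProcess();
        if (parent == nullptr) {
            return top_thread;
        }
        KThread *pinned = parent->GetPreemptionStatePinnedThread(core_id);
        if (pinned != nullptr && pinned != top_thread && pinned->GetRawState() == KThread::ThreadState_Runnable) {
            return pinned;
        }
        return top_thread;
    }

}
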
@@ -42,7 +42,7 @@ namespace ams::kern {
        clc->current.scheduler = std::addressof(clc->scheduler);
        clc->current.interrupt_task_manager = std::addressof(clc->interrupt_task_manager);
        clc->current.core_id = core_id;
        clc->current.exception_stack_bottom = GetVoidPointer(KMemoryLayout::GetExceptionStackBottomAddress(core_id));
        clc->current.exception_stack_top = GetVoidPointer(KMemoryLayout::GetExceptionStackTopAddress(core_id) - sizeof(KThread::StackParameters));

        /* Clear debugging counters. */
        clc->num_sw_interrupts = 0;

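The exception_stack_top assignment above reserves sizeof(KThread::StackParameters) bytes at the very top of each core's exception stack, so the usable stack top sits immediately below the per-thread stack parameters that the SVC handler pokes at via [sp, #0x120 + ...]. A small illustrative computation follows; the stack size, base address, and StackParameters size are made-up placeholders, not values from this diff.

#include <cstddef>
#include <cstdint>

/* Placeholder only: the real KThread::StackParameters layout is not shown here. */
struct PlaceholderStackParameters { uint8_t raw[0x30]; };

/* Assumed values, purely for illustration. */
constexpr uintptr_t ExceptionStackBottom = 0xFFFFFF8000100000;
constexpr size_t    ExceptionStackSize   = 0x1000;

/* Mirrors GetExceptionStackTopAddress(core_id) - sizeof(KThread::StackParameters). */
constexpr uintptr_t ExceptionStackTop = ExceptionStackBottom + ExceptionStackSize;
constexpr uintptr_t UsableStackTop    = ExceptionStackTop - sizeof(PlaceholderStackParameters);

static_assert(UsableStackTop < ExceptionStackTop, "StackParameters occupies the top of the exception stack");
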
@@ -70,72 +70,95 @@
vector_base _ZN3ams4kern16ExceptionVectorsEv

/* Current EL, SP0 */
.global unknown_exception
unknown_exception:
vector_entry synch_sp0
    /* Just infinite loop. */
    b unknown_exception
    clrex
    nop
    b synch_sp0
check_vector_size synch_sp0

vector_entry irq_sp0
    b unknown_exception
    clrex
    nop
    b irq_sp0
check_vector_size irq_sp0

vector_entry fiq_sp0
    b unknown_exception
    clrex
    nop
    b fiq_sp0
check_vector_size fiq_sp0

vector_entry serror_sp0
    b unknown_exception
    clrex
    nop
    b serror_sp0
check_vector_size serror_sp0

/* Current EL, SPx */
vector_entry synch_spx
    b unknown_exception
    clrex
    b _ZN3ams4kern5arm6430EL1SynchronousExceptionHandlerEv
check_vector_size synch_spx

vector_entry irq_spx
    b unknown_exception
    b _ZN3ams4kern5arm6422EL1IrqExceptionHandlerEv
check_vector_size irq_spx

vector_entry fiq_spx
    b unknown_exception
    clrex
    nop
    b fiq_spx
check_vector_size fiq_spx

vector_entry serror_spx
    b unknown_exception
    clrex
    nop
    b _ZN3ams4kern5arm6421EL1SystemErrorHandlerEv
check_vector_size serror_spx

/* Lower EL, A64 */
vector_entry synch_a64
    b unknown_exception
    clrex
    b _ZN3ams4kern5arm6430EL0SynchronousExceptionHandlerEv
check_vector_size synch_a64

vector_entry irq_a64
    b unknown_exception
    clrex
    b _ZN3ams4kern5arm6422EL0IrqExceptionHandlerEv
check_vector_size irq_a64

vector_entry fiq_a64
    b unknown_exception
    clrex
    nop
    b fiq_a64
check_vector_size fiq_a64

vector_entry serror_a64
    b unknown_exception
    clrex
    nop
    b _ZN3ams4kern5arm6421EL0SystemErrorHandlerEv
check_vector_size serror_a64

/* Lower EL, A32 */
vector_entry synch_a32
    b unknown_exception
    clrex
    b _ZN3ams4kern5arm6430EL0SynchronousExceptionHandlerEv
check_vector_size synch_a32

vector_entry irq_a32
    b unknown_exception
    clrex
    b _ZN3ams4kern5arm6422EL0IrqExceptionHandlerEv
check_vector_size irq_a32

vector_entry fiq_a32
    b unknown_exception
    clrex
    nop
    b fiq_a32
check_vector_size fiq_a32

vector_entry serror_a32
    b unknown_exception
    clrex
    nop
    b _ZN3ams4kern5arm6421EL0SystemErrorHandlerEv
check_vector_size serror_a32
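
These vectors only take effect once VBAR_EL1 points at the vector_base label, which on AArch64 must be 2 KiB (0x800) aligned, with each of the 16 slots limited to 0x80 bytes (presumably what check_vector_size enforces). Exactly where the kernel performs the VBAR_EL1 write is not part of this hunk, so the following is only an illustrative C++ sketch using the demangled symbol above (ams::kern::ExceptionVectors).

#include <cstdint>

namespace ams::kern {

    void ExceptionVectors();   /* the vector_base label above (mangled: _ZN3ams4kern16ExceptionVectorsEv) */

}

/* Hedged sketch: point VBAR_EL1 at the table and synchronize the context.  */
/* Where and when the kernel really does this is an assumption, not shown.  */
inline void InstallExceptionVectorsSketch() {
    const uintptr_t vbar = reinterpret_cast<uintptr_t>(&::ams::kern::ExceptionVectors);
    __asm__ __volatile__("msr vbar_el1, %0" :: "r"(vbar) : "memory");
    __asm__ __volatile__("isb" ::: "memory");
}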