kern: mostly kill magic numbers in assembly, fix SVCs >= 0x80

This commit is contained in:
Michael Scire 2021-04-14 18:01:08 -07:00
parent 9e563d590b
commit 037b04ac60
15 changed files with 747 additions and 504 deletions

View file

@ -14,6 +14,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>. * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
#pragma once #pragma once
#include <mesosphere/kern_select_assembly_offsets.h>
namespace ams::kern::init { namespace ams::kern::init {
@ -31,5 +32,19 @@ namespace ams::kern::init {
u64 setup_function; u64 setup_function;
u64 exception_stack; u64 exception_stack;
}; };
/* Compile-time proof that the INIT_ARGUMENTS_* macros used by handwritten */
/* assembly stay in sync with the KInitArguments struct layout. */
static_assert(sizeof(KInitArguments) == INIT_ARGUMENTS_SIZE);
static_assert(__builtin_offsetof(KInitArguments, ttbr0) == INIT_ARGUMENTS_TTBR0);
static_assert(__builtin_offsetof(KInitArguments, ttbr1) == INIT_ARGUMENTS_TTBR1);
static_assert(__builtin_offsetof(KInitArguments, tcr) == INIT_ARGUMENTS_TCR);
static_assert(__builtin_offsetof(KInitArguments, mair) == INIT_ARGUMENTS_MAIR);
static_assert(__builtin_offsetof(KInitArguments, cpuactlr) == INIT_ARGUMENTS_CPUACTLR);
static_assert(__builtin_offsetof(KInitArguments, cpuectlr) == INIT_ARGUMENTS_CPUECTLR);
static_assert(__builtin_offsetof(KInitArguments, sctlr) == INIT_ARGUMENTS_SCTLR);
static_assert(__builtin_offsetof(KInitArguments, sp) == INIT_ARGUMENTS_SP);
static_assert(__builtin_offsetof(KInitArguments, entrypoint) == INIT_ARGUMENTS_ENTRYPOINT);
static_assert(__builtin_offsetof(KInitArguments, argument) == INIT_ARGUMENTS_ARGUMENT);
static_assert(__builtin_offsetof(KInitArguments, setup_function) == INIT_ARGUMENTS_SETUP_FUNCTION);
static_assert(__builtin_offsetof(KInitArguments, exception_stack) == INIT_ARGUMENTS_EXCEPTION_STACK);
} }

View file

@ -15,6 +15,9 @@
*/ */
#pragma once #pragma once
/* TODO: Different header for this? */
#define AMS_KERN_NUM_SUPERVISOR_CALLS 0xC0
#define THREAD_STACK_PARAMETERS_SIZE 0x30 #define THREAD_STACK_PARAMETERS_SIZE 0x30
#define THREAD_STACK_PARAMETERS_SVC_PERMISSION 0x00 #define THREAD_STACK_PARAMETERS_SVC_PERMISSION 0x00
#define THREAD_STACK_PARAMETERS_CONTEXT 0x18 #define THREAD_STACK_PARAMETERS_CONTEXT 0x18
@ -24,4 +27,124 @@
#define THREAD_STACK_PARAMETERS_CURRENT_SVC_ID 0x2B #define THREAD_STACK_PARAMETERS_CURRENT_SVC_ID 0x2B
#define THREAD_STACK_PARAMETERS_IS_CALLING_SVC 0x2C #define THREAD_STACK_PARAMETERS_IS_CALLING_SVC 0x2C
#define THREAD_STACK_PARAMETERS_IS_IN_EXCEPTION_HANDLER 0x2D #define THREAD_STACK_PARAMETERS_IS_IN_EXCEPTION_HANDLER 0x2D
#define THREAD_STACK_PARAMETERS_IS_PINNED 0x2E #define THREAD_STACK_PARAMETERS_IS_PINNED 0x2E
/* Offsets into KThreadContext, for use by handwritten assembly. */
/* Each value is validated by a static_assert against the C++ struct. */
#define THREAD_CONTEXT_SIZE 0x290
#define THREAD_CONTEXT_CPU_REGISTERS 0x000
#define THREAD_CONTEXT_X19 0x000
#define THREAD_CONTEXT_X20 0x008
#define THREAD_CONTEXT_X21 0x010
#define THREAD_CONTEXT_X22 0x018
#define THREAD_CONTEXT_X23 0x020
#define THREAD_CONTEXT_X24 0x028
#define THREAD_CONTEXT_X25 0x030
#define THREAD_CONTEXT_X26 0x038
#define THREAD_CONTEXT_X27 0x040
#define THREAD_CONTEXT_X28 0x048
#define THREAD_CONTEXT_X29 0x050
#define THREAD_CONTEXT_LR 0x058
#define THREAD_CONTEXT_SP 0x060
#define THREAD_CONTEXT_CPACR 0x068
#define THREAD_CONTEXT_FPCR 0x070
#define THREAD_CONTEXT_FPSR 0x078
#define THREAD_CONTEXT_FPU_REGISTERS 0x080
#define THREAD_CONTEXT_LOCKED 0x280
/* Aliases naming the first register of a pair, for stp/ldp two-register accesses. */
#define THREAD_CONTEXT_X19_X20 THREAD_CONTEXT_X19
#define THREAD_CONTEXT_X21_X22 THREAD_CONTEXT_X21
#define THREAD_CONTEXT_X23_X24 THREAD_CONTEXT_X23
#define THREAD_CONTEXT_X25_X26 THREAD_CONTEXT_X25
#define THREAD_CONTEXT_X27_X28 THREAD_CONTEXT_X27
#define THREAD_CONTEXT_X29_X30 THREAD_CONTEXT_X29
#define THREAD_CONTEXT_LR_SP THREAD_CONTEXT_LR
#define THREAD_CONTEXT_SP_CPACR THREAD_CONTEXT_SP
#define THREAD_CONTEXT_FPCR_FPSR THREAD_CONTEXT_FPCR
/* Offsets into KExceptionContext (x0-x30, sp, pc, psr, tpidr), validated by static_asserts. */
#define EXCEPTION_CONTEXT_SIZE 0x120
#define EXCEPTION_CONTEXT_X0 0x000
#define EXCEPTION_CONTEXT_X1 0x008
#define EXCEPTION_CONTEXT_X2 0x010
#define EXCEPTION_CONTEXT_X3 0x018
#define EXCEPTION_CONTEXT_X4 0x020
#define EXCEPTION_CONTEXT_X5 0x028
#define EXCEPTION_CONTEXT_X6 0x030
#define EXCEPTION_CONTEXT_X7 0x038
#define EXCEPTION_CONTEXT_X8 0x040
#define EXCEPTION_CONTEXT_X9 0x048
#define EXCEPTION_CONTEXT_X10 0x050
#define EXCEPTION_CONTEXT_X11 0x058
#define EXCEPTION_CONTEXT_X12 0x060
#define EXCEPTION_CONTEXT_X13 0x068
#define EXCEPTION_CONTEXT_X14 0x070
#define EXCEPTION_CONTEXT_X15 0x078
#define EXCEPTION_CONTEXT_X16 0x080
#define EXCEPTION_CONTEXT_X17 0x088
#define EXCEPTION_CONTEXT_X18 0x090
#define EXCEPTION_CONTEXT_X19 0x098
#define EXCEPTION_CONTEXT_X20 0x0A0
#define EXCEPTION_CONTEXT_X21 0x0A8
#define EXCEPTION_CONTEXT_X22 0x0B0
#define EXCEPTION_CONTEXT_X23 0x0B8
#define EXCEPTION_CONTEXT_X24 0x0C0
#define EXCEPTION_CONTEXT_X25 0x0C8
#define EXCEPTION_CONTEXT_X26 0x0D0
#define EXCEPTION_CONTEXT_X27 0x0D8
#define EXCEPTION_CONTEXT_X28 0x0E0
#define EXCEPTION_CONTEXT_X29 0x0E8
#define EXCEPTION_CONTEXT_X30 0x0F0
#define EXCEPTION_CONTEXT_SP 0x0F8
#define EXCEPTION_CONTEXT_PC 0x100
#define EXCEPTION_CONTEXT_PSR 0x108
#define EXCEPTION_CONTEXT_TPIDR 0x110
/* Aliases naming the first register of a pair, for stp/ldp two-register accesses */
/* (both even-first and odd-first pairings are used by the handlers). */
#define EXCEPTION_CONTEXT_X0_X1 EXCEPTION_CONTEXT_X0
#define EXCEPTION_CONTEXT_X2_X3 EXCEPTION_CONTEXT_X2
#define EXCEPTION_CONTEXT_X4_X5 EXCEPTION_CONTEXT_X4
#define EXCEPTION_CONTEXT_X6_X7 EXCEPTION_CONTEXT_X6
#define EXCEPTION_CONTEXT_X8_X9 EXCEPTION_CONTEXT_X8
#define EXCEPTION_CONTEXT_X10_X11 EXCEPTION_CONTEXT_X10
#define EXCEPTION_CONTEXT_X12_X13 EXCEPTION_CONTEXT_X12
#define EXCEPTION_CONTEXT_X14_X15 EXCEPTION_CONTEXT_X14
#define EXCEPTION_CONTEXT_X16_X17 EXCEPTION_CONTEXT_X16
#define EXCEPTION_CONTEXT_X18_X19 EXCEPTION_CONTEXT_X18
#define EXCEPTION_CONTEXT_X20_X21 EXCEPTION_CONTEXT_X20
#define EXCEPTION_CONTEXT_X22_X23 EXCEPTION_CONTEXT_X22
#define EXCEPTION_CONTEXT_X24_X25 EXCEPTION_CONTEXT_X24
#define EXCEPTION_CONTEXT_X26_X27 EXCEPTION_CONTEXT_X26
#define EXCEPTION_CONTEXT_X28_X29 EXCEPTION_CONTEXT_X28
#define EXCEPTION_CONTEXT_X30_SP EXCEPTION_CONTEXT_X30
#define EXCEPTION_CONTEXT_PC_PSR EXCEPTION_CONTEXT_PC
#define EXCEPTION_CONTEXT_X9_X10 EXCEPTION_CONTEXT_X9
#define EXCEPTION_CONTEXT_X19_X20 EXCEPTION_CONTEXT_X19
#define EXCEPTION_CONTEXT_X21_X22 EXCEPTION_CONTEXT_X21
#define EXCEPTION_CONTEXT_X23_X24 EXCEPTION_CONTEXT_X23
#define EXCEPTION_CONTEXT_X25_X26 EXCEPTION_CONTEXT_X25
#define EXCEPTION_CONTEXT_X27_X28 EXCEPTION_CONTEXT_X27
#define EXCEPTION_CONTEXT_X29_X30 EXCEPTION_CONTEXT_X29
#define EXCEPTION_CONTEXT_SP_PC EXCEPTION_CONTEXT_SP
#define EXCEPTION_CONTEXT_PSR_TPIDR EXCEPTION_CONTEXT_PSR
/* Offsets into ams::svc::ThreadLocalRegion, validated by static_asserts. */
#define THREAD_LOCAL_REGION_MESSAGE_BUFFER 0x000
#define THREAD_LOCAL_REGION_DISABLE_COUNT 0x100
#define THREAD_LOCAL_REGION_INTERRUPT_FLAG 0x102
#define THREAD_LOCAL_REGION_SIZE 0x200
/* Offsets into KInitArguments, validated by static_asserts. */
#define INIT_ARGUMENTS_SIZE 0x60
#define INIT_ARGUMENTS_TTBR0 0x00
#define INIT_ARGUMENTS_TTBR1 0x08
#define INIT_ARGUMENTS_TCR 0x10
#define INIT_ARGUMENTS_MAIR 0x18
#define INIT_ARGUMENTS_CPUACTLR 0x20
#define INIT_ARGUMENTS_CPUECTLR 0x28
#define INIT_ARGUMENTS_SCTLR 0x30
#define INIT_ARGUMENTS_SP 0x38
#define INIT_ARGUMENTS_ENTRYPOINT 0x40
#define INIT_ARGUMENTS_ARGUMENT 0x48
#define INIT_ARGUMENTS_SETUP_FUNCTION 0x50
#define INIT_ARGUMENTS_EXCEPTION_STACK 0x58
/* Offsets into KScheduler's scheduling state, validated by static_asserts. */
#define KSCHEDULER_NEEDS_SCHEDULING 0x00
#define KSCHEDULER_INTERRUPT_TASK_THREAD_RUNNABLE 0x01
#define KSCHEDULER_HIGHEST_PRIORITY_THREAD 0x10
#define KSCHEDULER_IDLE_THREAD_STACK 0x18

View file

@ -43,6 +43,42 @@ namespace ams::kern::arch::arm64 {
} }
} }
}; };
static_assert(sizeof(KExceptionContext) == 0x120); static_assert(sizeof(KExceptionContext) == EXCEPTION_CONTEXT_SIZE);
/* Compile-time proof that the EXCEPTION_CONTEXT_* macros used by handwritten */
/* assembly stay in sync with the KExceptionContext struct layout. */
static_assert(__builtin_offsetof(KExceptionContext, x[ 0]) == EXCEPTION_CONTEXT_X0);
static_assert(__builtin_offsetof(KExceptionContext, x[ 1]) == EXCEPTION_CONTEXT_X1);
static_assert(__builtin_offsetof(KExceptionContext, x[ 2]) == EXCEPTION_CONTEXT_X2);
static_assert(__builtin_offsetof(KExceptionContext, x[ 3]) == EXCEPTION_CONTEXT_X3);
static_assert(__builtin_offsetof(KExceptionContext, x[ 4]) == EXCEPTION_CONTEXT_X4);
static_assert(__builtin_offsetof(KExceptionContext, x[ 5]) == EXCEPTION_CONTEXT_X5);
static_assert(__builtin_offsetof(KExceptionContext, x[ 6]) == EXCEPTION_CONTEXT_X6);
static_assert(__builtin_offsetof(KExceptionContext, x[ 7]) == EXCEPTION_CONTEXT_X7);
static_assert(__builtin_offsetof(KExceptionContext, x[ 8]) == EXCEPTION_CONTEXT_X8);
static_assert(__builtin_offsetof(KExceptionContext, x[ 9]) == EXCEPTION_CONTEXT_X9);
static_assert(__builtin_offsetof(KExceptionContext, x[10]) == EXCEPTION_CONTEXT_X10);
static_assert(__builtin_offsetof(KExceptionContext, x[11]) == EXCEPTION_CONTEXT_X11);
static_assert(__builtin_offsetof(KExceptionContext, x[12]) == EXCEPTION_CONTEXT_X12);
static_assert(__builtin_offsetof(KExceptionContext, x[13]) == EXCEPTION_CONTEXT_X13);
static_assert(__builtin_offsetof(KExceptionContext, x[14]) == EXCEPTION_CONTEXT_X14);
static_assert(__builtin_offsetof(KExceptionContext, x[15]) == EXCEPTION_CONTEXT_X15);
static_assert(__builtin_offsetof(KExceptionContext, x[16]) == EXCEPTION_CONTEXT_X16);
static_assert(__builtin_offsetof(KExceptionContext, x[17]) == EXCEPTION_CONTEXT_X17);
static_assert(__builtin_offsetof(KExceptionContext, x[18]) == EXCEPTION_CONTEXT_X18);
static_assert(__builtin_offsetof(KExceptionContext, x[19]) == EXCEPTION_CONTEXT_X19);
static_assert(__builtin_offsetof(KExceptionContext, x[20]) == EXCEPTION_CONTEXT_X20);
static_assert(__builtin_offsetof(KExceptionContext, x[21]) == EXCEPTION_CONTEXT_X21);
static_assert(__builtin_offsetof(KExceptionContext, x[22]) == EXCEPTION_CONTEXT_X22);
static_assert(__builtin_offsetof(KExceptionContext, x[23]) == EXCEPTION_CONTEXT_X23);
static_assert(__builtin_offsetof(KExceptionContext, x[24]) == EXCEPTION_CONTEXT_X24);
static_assert(__builtin_offsetof(KExceptionContext, x[25]) == EXCEPTION_CONTEXT_X25);
static_assert(__builtin_offsetof(KExceptionContext, x[26]) == EXCEPTION_CONTEXT_X26);
static_assert(__builtin_offsetof(KExceptionContext, x[27]) == EXCEPTION_CONTEXT_X27);
static_assert(__builtin_offsetof(KExceptionContext, x[28]) == EXCEPTION_CONTEXT_X28);
static_assert(__builtin_offsetof(KExceptionContext, x[29]) == EXCEPTION_CONTEXT_X29);
static_assert(__builtin_offsetof(KExceptionContext, x[30]) == EXCEPTION_CONTEXT_X30);
static_assert(__builtin_offsetof(KExceptionContext, sp) == EXCEPTION_CONTEXT_SP);
static_assert(__builtin_offsetof(KExceptionContext, pc) == EXCEPTION_CONTEXT_PC);
static_assert(__builtin_offsetof(KExceptionContext, psr) == EXCEPTION_CONTEXT_PSR);
static_assert(__builtin_offsetof(KExceptionContext, tpidr) == EXCEPTION_CONTEXT_TPIDR);
} }

View file

@ -79,8 +79,38 @@ namespace ams::kern::arch::arm64 {
const u128 *GetFpuRegisters() const { return m_fpu_registers; } const u128 *GetFpuRegisters() const { return m_fpu_registers; }
public: public:
static void OnThreadTerminating(const KThread *thread); static void OnThreadTerminating(const KThread *thread);
public:
static consteval bool ValidateOffsets();
}; };
/* Compile-time validation that KThreadContext's layout matches the THREAD_CONTEXT_* */
/* macros used by handwritten assembly. Defined as a consteval member function so the */
/* static_asserts may name private members; always returns true if it compiles. */
consteval bool KThreadContext::ValidateOffsets() {
static_assert(sizeof(KThreadContext) == THREAD_CONTEXT_SIZE);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.registers) == THREAD_CONTEXT_CPU_REGISTERS);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x19) == THREAD_CONTEXT_X19);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x20) == THREAD_CONTEXT_X20);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x21) == THREAD_CONTEXT_X21);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x22) == THREAD_CONTEXT_X22);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x23) == THREAD_CONTEXT_X23);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x24) == THREAD_CONTEXT_X24);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x25) == THREAD_CONTEXT_X25);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x26) == THREAD_CONTEXT_X26);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x27) == THREAD_CONTEXT_X27);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x28) == THREAD_CONTEXT_X28);
static_assert(__builtin_offsetof(KThreadContext, m_callee_saved.x29) == THREAD_CONTEXT_X29);
static_assert(__builtin_offsetof(KThreadContext, m_lr) == THREAD_CONTEXT_LR);
static_assert(__builtin_offsetof(KThreadContext, m_sp) == THREAD_CONTEXT_SP);
static_assert(__builtin_offsetof(KThreadContext, m_cpacr) == THREAD_CONTEXT_CPACR);
static_assert(__builtin_offsetof(KThreadContext, m_fpcr) == THREAD_CONTEXT_FPCR);
static_assert(__builtin_offsetof(KThreadContext, m_fpsr) == THREAD_CONTEXT_FPSR);
static_assert(__builtin_offsetof(KThreadContext, m_fpu_registers) == THREAD_CONTEXT_FPU_REGISTERS);
static_assert(__builtin_offsetof(KThreadContext, m_locked) == THREAD_CONTEXT_LOCKED);
return true;
}
/* Force evaluation of the validator at compile time. */
static_assert(KThreadContext::ValidateOffsets());
void GetUserContext(ams::svc::ThreadContext *out, const KThread *thread); void GetUserContext(ams::svc::ThreadContext *out, const KThread *thread);
} }

View file

@ -194,8 +194,20 @@ namespace ams::kern {
static bool s_scheduler_update_needed; static bool s_scheduler_update_needed;
static KSchedulerPriorityQueue s_priority_queue; static KSchedulerPriorityQueue s_priority_queue;
static LockType s_scheduler_lock; static LockType s_scheduler_lock;
public:
static consteval bool ValidateAssemblyOffsets();
}; };
/* Compile-time validation that KScheduler's scheduling state layout matches the */
/* KSCHEDULER_* macros used by handwritten assembly. Defined as a consteval member */
/* function so the static_asserts may name private members; always returns true. */
consteval bool KScheduler::ValidateAssemblyOffsets() {
static_assert(__builtin_offsetof(KScheduler, m_state.needs_scheduling) == KSCHEDULER_NEEDS_SCHEDULING);
static_assert(__builtin_offsetof(KScheduler, m_state.interrupt_task_thread_runnable) == KSCHEDULER_INTERRUPT_TASK_THREAD_RUNNABLE);
static_assert(__builtin_offsetof(KScheduler, m_state.highest_priority_thread) == KSCHEDULER_HIGHEST_PRIORITY_THREAD);
static_assert(__builtin_offsetof(KScheduler, m_state.idle_thread_stack) == KSCHEDULER_IDLE_THREAD_STACK);
return true;
}
/* Force evaluation of the validator at compile time. */
static_assert(KScheduler::ValidateAssemblyOffsets());
class KScopedSchedulerLock : KScopedLock<KScheduler::LockType> { class KScopedSchedulerLock : KScopedLock<KScheduler::LockType> {
public: public:
explicit ALWAYS_INLINE KScopedSchedulerLock() : KScopedLock(KScheduler::s_scheduler_lock) { /* ... */ } explicit ALWAYS_INLINE KScopedSchedulerLock() : KScopedLock(KScheduler::s_scheduler_lock) { /* ... */ }

View file

@ -105,4 +105,10 @@ namespace ams::kern {
} }
}; };
/* Miscellaneous sanity checking. */
static_assert(ams::svc::ThreadLocalRegionSize == THREAD_LOCAL_REGION_SIZE);
static_assert(__builtin_offsetof(ams::svc::ThreadLocalRegion, message_buffer) == THREAD_LOCAL_REGION_MESSAGE_BUFFER);
static_assert(__builtin_offsetof(ams::svc::ThreadLocalRegion, disable_count) == THREAD_LOCAL_REGION_DISABLE_COUNT);
static_assert(__builtin_offsetof(ams::svc::ThreadLocalRegion, interrupt_flag) == THREAD_LOCAL_REGION_INTERRUPT_FLAG);
} }

View file

@ -20,7 +20,7 @@
namespace ams::kern::svc { namespace ams::kern::svc {
static constexpr size_t NumSupervisorCalls = 0xC0; static constexpr size_t NumSupervisorCalls = AMS_KERN_NUM_SUPERVISOR_CALLS;
#define AMS_KERN_SVC_DECLARE_ENUM_ID(ID, RETURN_TYPE, NAME, ...) \ #define AMS_KERN_SVC_DECLARE_ENUM_ID(ID, RETURN_TYPE, NAME, ...) \
SvcId_##NAME = ID, SvcId_##NAME = ID,

View file

@ -14,6 +14,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>. * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
#include <mesosphere/kern_build_config.hpp> #include <mesosphere/kern_build_config.hpp>
#include <mesosphere/kern_select_assembly_offsets.h>
#if defined(MESOSPHERE_ENABLE_PANIC_REGISTER_DUMP) #if defined(MESOSPHERE_ENABLE_PANIC_REGISTER_DUMP)
@ -32,28 +33,28 @@
\ \
/* Save x0/x1/sp to the context. */ \ /* Save x0/x1/sp to the context. */ \
ldr x1, [sp, #(8 * 0)]; \ ldr x1, [sp, #(8 * 0)]; \
str x1, [x0, #(8 * 0)]; \ str x1, [x0, #(EXCEPTION_CONTEXT_X0)]; \
ldr x1, [sp, #(8 * 1)]; \ ldr x1, [sp, #(8 * 1)]; \
str x1, [x0, #(8 * 1)]; \ str x1, [x0, #(EXCEPTION_CONTEXT_X1)]; \
\ \
/* Save all other registers to the context. */ \ /* Save all other registers to the context. */ \
stp x2, x3, [x0, #(8 * 2)]; \ stp x2, x3, [x0, #(EXCEPTION_CONTEXT_X2_X3)]; \
stp x4, x5, [x0, #(8 * 4)]; \ stp x4, x5, [x0, #(EXCEPTION_CONTEXT_X4_X5)]; \
stp x6, x7, [x0, #(8 * 6)]; \ stp x6, x7, [x0, #(EXCEPTION_CONTEXT_X6_X7)]; \
stp x8, x9, [x0, #(8 * 8)]; \ stp x8, x9, [x0, #(EXCEPTION_CONTEXT_X8_X9)]; \
stp x10, x11, [x0, #(8 * 10)]; \ stp x10, x11, [x0, #(EXCEPTION_CONTEXT_X10_X11)]; \
stp x12, x13, [x0, #(8 * 12)]; \ stp x12, x13, [x0, #(EXCEPTION_CONTEXT_X12_X13)]; \
stp x14, x15, [x0, #(8 * 14)]; \ stp x14, x15, [x0, #(EXCEPTION_CONTEXT_X14_X15)]; \
stp x16, x17, [x0, #(8 * 16)]; \ stp x16, x17, [x0, #(EXCEPTION_CONTEXT_X16_X17)]; \
stp x18, x19, [x0, #(8 * 18)]; \ stp x18, x19, [x0, #(EXCEPTION_CONTEXT_X18_X19)]; \
stp x20, x21, [x0, #(8 * 20)]; \ stp x20, x21, [x0, #(EXCEPTION_CONTEXT_X20_X21)]; \
stp x22, x23, [x0, #(8 * 22)]; \ stp x22, x23, [x0, #(EXCEPTION_CONTEXT_X22_X23)]; \
stp x24, x25, [x0, #(8 * 24)]; \ stp x24, x25, [x0, #(EXCEPTION_CONTEXT_X24_X25)]; \
stp x26, x27, [x0, #(8 * 26)]; \ stp x26, x27, [x0, #(EXCEPTION_CONTEXT_X26_X27)]; \
stp x28, x29, [x0, #(8 * 28)]; \ stp x28, x29, [x0, #(EXCEPTION_CONTEXT_X28_X29)]; \
\ \
add x1, sp, #16; \ add x1, sp, #16; \
stp x30, x1, [x0, #(8 * 30)]; \ stp x30, x1, [x0, #(EXCEPTION_CONTEXT_X30_SP)]; \
\ \
/* Restore x0/x1. */ \ /* Restore x0/x1. */ \
ldp x0, x1, [sp], #16; ldp x0, x1, [sp], #16;

View file

@ -21,15 +21,15 @@
.type _ZN3ams4kern3svc25CallReturnFromException64Ev, %function .type _ZN3ams4kern3svc25CallReturnFromException64Ev, %function
_ZN3ams4kern3svc25CallReturnFromException64Ev: _ZN3ams4kern3svc25CallReturnFromException64Ev:
/* Save registers the SVC entry handler didn't. */ /* Save registers the SVC entry handler didn't. */
stp x12, x13, [sp, #(8 * 12)] stp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp x14, x15, [sp, #(8 * 14)] stp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
stp x16, x17, [sp, #(8 * 16)] stp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
str x19, [sp, #(8 * 19)] str x19, [sp, #(EXCEPTION_CONTEXT_X19)]
stp x20, x21, [sp, #(8 * 20)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
stp x22, x23, [sp, #(8 * 22)] stp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
stp x24, x25, [sp, #(8 * 24)] stp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
stp x26, x26, [sp, #(8 * 26)] stp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)] /* BUG in old code: stored x26 twice, clobbering the saved-x27 slot; second register must be x27. */
stp x28, x29, [sp, #(8 * 28)] stp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
/* Call ams::kern::arch::arm64::ReturnFromException(result). */ /* Call ams::kern::arch::arm64::ReturnFromException(result). */
bl _ZN3ams4kern4arch5arm6419ReturnFromExceptionENS_6ResultE bl _ZN3ams4kern4arch5arm6419ReturnFromExceptionENS_6ResultE
@ -63,7 +63,7 @@ _ZN3ams4kern3svc14RestoreContextEm:
0: /* We should handle DPC. */ 0: /* We should handle DPC. */
/* Check the dpc flags. */ /* Check the dpc flags. */
ldrb w8, [sp, #(0x120 + THREAD_STACK_PARAMETERS_DPC_FLAGS)] ldrb w8, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_DPC_FLAGS)]
cbz w8, 1f cbz w8, 1f
/* We have DPC to do! */ /* We have DPC to do! */
@ -83,32 +83,32 @@ _ZN3ams4kern3svc14RestoreContextEm:
1: /* We're done with DPC, and should return from the svc. */ 1: /* We're done with DPC, and should return from the svc. */
/* Clear our in-SVC note. */ /* Clear our in-SVC note. */
strb wzr, [sp, #(0x120 + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)] strb wzr, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)]
/* Restore registers. */ /* Restore registers. */
ldp x30, x8, [sp, #(8 * 30)] ldp x30, x8, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
ldp x9, x10, [sp, #(8 * 32)] ldp x9, x10, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
ldr x11, [sp, #(8 * 34)] ldr x11, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
msr sp_el0, x8 msr sp_el0, x8
msr elr_el1, x9 msr elr_el1, x9
msr spsr_el1, x10 msr spsr_el1, x10
msr tpidr_el0, x11 msr tpidr_el0, x11
ldp x0, x1, [sp, #(8 * 0)] ldp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
ldp x2, x3, [sp, #(8 * 2)] ldp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
ldp x4, x5, [sp, #(8 * 4)] ldp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
ldp x6, x7, [sp, #(8 * 6)] ldp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
ldp x8, x9, [sp, #(8 * 8)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x10, x11, [sp, #(8 * 10)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x12, x13, [sp, #(8 * 12)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x14, x15, [sp, #(8 * 14)] ldp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
ldp x16, x17, [sp, #(8 * 16)] ldp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
ldp x18, x19, [sp, #(8 * 18)] ldp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
ldp x20, x21, [sp, #(8 * 20)] ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
ldp x22, x23, [sp, #(8 * 22)] ldp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
ldp x24, x25, [sp, #(8 * 24)] ldp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
ldp x26, x27, [sp, #(8 * 26)] ldp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
ldp x28, x29, [sp, #(8 * 28)] ldp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
/* Return. */ /* Return. */
add sp, sp, #0x120 add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
eret eret

View file

@ -22,45 +22,45 @@
.type _ZN3ams4kern4arch5arm6412SvcHandler64Ev, %function .type _ZN3ams4kern4arch5arm6412SvcHandler64Ev, %function
_ZN3ams4kern4arch5arm6412SvcHandler64Ev: _ZN3ams4kern4arch5arm6412SvcHandler64Ev:
/* Create a KExceptionContext for the exception. */ /* Create a KExceptionContext for the exception. */
sub sp, sp, #0x120 sub sp, sp, #(EXCEPTION_CONTEXT_SIZE)
/* Save registers needed for ReturnFromException */ /* Save registers needed for ReturnFromException */
stp x9, x10, [sp, #(8 * 9)] stp x9, x10, [sp, #(EXCEPTION_CONTEXT_X9_X10)]
str x11, [sp, #(8 * 11)] str x11, [sp, #(EXCEPTION_CONTEXT_X11)]
str x18, [sp, #(8 * 18)] str x18, [sp, #(EXCEPTION_CONTEXT_X18)]
mrs x8, sp_el0 mrs x8, sp_el0
mrs x9, elr_el1 mrs x9, elr_el1
mrs x10, spsr_el1 mrs x10, spsr_el1
mrs x11, tpidr_el0 mrs x11, tpidr_el0
ldr x18, [sp, #(0x120 + THREAD_STACK_PARAMETERS_CUR_THREAD)] ldr x18, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_CUR_THREAD)]
/* Save callee-saved registers. */ /* Save callee-saved registers. */
stp x19, x20, [sp, #(8 * 19)] stp x19, x20, [sp, #(EXCEPTION_CONTEXT_X19_X20)]
stp x21, x22, [sp, #(8 * 21)] stp x21, x22, [sp, #(EXCEPTION_CONTEXT_X21_X22)]
stp x23, x24, [sp, #(8 * 23)] stp x23, x24, [sp, #(EXCEPTION_CONTEXT_X23_X24)]
stp x25, x26, [sp, #(8 * 25)] stp x25, x26, [sp, #(EXCEPTION_CONTEXT_X25_X26)]
stp x27, x28, [sp, #(8 * 27)] stp x27, x28, [sp, #(EXCEPTION_CONTEXT_X27_X28)]
/* Save miscellaneous registers. */ /* Save miscellaneous registers. */
stp x0, x1, [sp, #(8 * 0)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x2, x3, [sp, #(8 * 2)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x4, x5, [sp, #(8 * 4)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x6, x7, [sp, #(8 * 6)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp x29, x30, [sp, #(8 * 29)] stp x29, x30, [sp, #(EXCEPTION_CONTEXT_X29_X30)]
stp x8, x9, [sp, #(8 * 31)] stp x8, x9, [sp, #(EXCEPTION_CONTEXT_SP_PC)]
stp x10, x11, [sp, #(8 * 33)] stp x10, x11, [sp, #(EXCEPTION_CONTEXT_PSR_TPIDR)]
/* Check if the SVC index is out of range. */ /* Check if the SVC index is out of range. */
mrs x8, esr_el1 mrs x8, esr_el1
and x8, x8, #0xFF and x8, x8, #0xFF
cmp x8, #0x80 cmp x8, #(AMS_KERN_NUM_SUPERVISOR_CALLS)
b.ge 3f b.ge 3f
/* Check the specific SVC permission bit for allowance. */ /* Check the specific SVC permission bit for allowance. */
mov x9, sp mov x9, sp
add x9, x9, x8, lsr#3 add x9, x9, x8, lsr#3
ldrb w9, [x9, #(0x120 + THREAD_STACK_PARAMETERS_SVC_PERMISSION)] ldrb w9, [x9, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_SVC_PERMISSION)]
and x10, x8, #0x7 and x10, x8, #0x7
lsr x10, x9, x10 lsr x10, x9, x10
tst x10, #1 tst x10, #1
@ -68,11 +68,11 @@ _ZN3ams4kern4arch5arm6412SvcHandler64Ev:
/* Check if our disable count allows us to call SVCs. */ /* Check if our disable count allows us to call SVCs. */
mrs x10, tpidrro_el0 mrs x10, tpidrro_el0
ldrh w10, [x10, #0x100] ldrh w10, [x10, #(THREAD_LOCAL_REGION_DISABLE_COUNT)]
cbz w10, 1f cbz w10, 1f
/* It might not, so check the stack params to see if we must not allow the SVC. */ /* It might not, so check the stack params to see if we must not allow the SVC. */
ldrb w10, [sp, #(0x120 + THREAD_STACK_PARAMETERS_IS_PINNED)] ldrb w10, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_IS_PINNED)]
cbz w10, 3f cbz w10, 3f
1: /* We can call the SVC. */ 1: /* We can call the SVC. */
@ -82,8 +82,8 @@ _ZN3ams4kern4arch5arm6412SvcHandler64Ev:
/* Note that we're calling the SVC. */ /* Note that we're calling the SVC. */
mov w10, #1 mov w10, #1
strb w10, [sp, #(0x120 + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)] strb w10, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)]
strb w8, [sp, #(0x120 + THREAD_STACK_PARAMETERS_CURRENT_SVC_ID)] strb w8, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_CURRENT_SVC_ID)]
/* If we should, trace the svc entry. */ /* If we should, trace the svc entry. */
#if defined(MESOSPHERE_BUILD_FOR_TRACING) #if defined(MESOSPHERE_BUILD_FOR_TRACING)
@ -110,7 +110,7 @@ _ZN3ams4kern4arch5arm6412SvcHandler64Ev:
2: /* We completed the SVC, and we should handle DPC. */ 2: /* We completed the SVC, and we should handle DPC. */
/* Check the dpc flags. */ /* Check the dpc flags. */
ldrb w8, [sp, #(0x120 + THREAD_STACK_PARAMETERS_DPC_FLAGS)] ldrb w8, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_DPC_FLAGS)]
cbz w8, 4f cbz w8, 4f
/* We have DPC to do! */ /* We have DPC to do! */
@ -130,57 +130,57 @@ _ZN3ams4kern4arch5arm6412SvcHandler64Ev:
3: /* Invalid SVC. */ 3: /* Invalid SVC. */
/* Setup the context to call into HandleException. */ /* Setup the context to call into HandleException. */
stp x0, x1, [sp, #(8 * 0)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x2, x3, [sp, #(8 * 2)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x4, x5, [sp, #(8 * 4)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x6, x7, [sp, #(8 * 6)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp xzr, xzr, [sp, #(8 * 8)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
stp xzr, xzr, [sp, #(8 * 10)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
stp xzr, xzr, [sp, #(8 * 12)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp xzr, xzr, [sp, #(8 * 14)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
stp xzr, xzr, [sp, #(8 * 16)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
str x19, [sp, #(8 * 19)] str x19, [sp, #(EXCEPTION_CONTEXT_X19)]
stp x20, x21, [sp, #(8 * 20)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
stp x22, x23, [sp, #(8 * 22)] stp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
stp x24, x25, [sp, #(8 * 24)] stp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
stp x26, x27, [sp, #(8 * 26)] stp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
stp x28, x29, [sp, #(8 * 28)] stp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
/* Call ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *) */ /* Call ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *) */
mov x0, sp mov x0, sp
bl _ZN3ams4kern4arch5arm6415HandleExceptionEPNS2_17KExceptionContextE bl _ZN3ams4kern4arch5arm6415HandleExceptionEPNS2_17KExceptionContextE
/* Restore registers. */ /* Restore registers. */
ldp x30, x8, [sp, #(8 * 30)] ldp x30, x8, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
ldp x9, x10, [sp, #(8 * 32)] ldp x9, x10, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
ldr x11, [sp, #(8 * 34)] ldr x11, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
msr sp_el0, x8 msr sp_el0, x8
msr elr_el1, x9 msr elr_el1, x9
msr spsr_el1, x10 msr spsr_el1, x10
msr tpidr_el0, x11 msr tpidr_el0, x11
ldp x0, x1, [sp, #(8 * 0)] ldp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
ldp x2, x3, [sp, #(8 * 2)] ldp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
ldp x4, x5, [sp, #(8 * 4)] ldp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
ldp x6, x7, [sp, #(8 * 6)] ldp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
ldp x8, x9, [sp, #(8 * 8)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x10, x11, [sp, #(8 * 10)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x12, x13, [sp, #(8 * 12)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x14, x15, [sp, #(8 * 14)] ldp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
ldp x16, x17, [sp, #(8 * 16)] ldp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
ldp x18, x19, [sp, #(8 * 18)] ldp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
ldp x20, x21, [sp, #(8 * 20)] ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
ldp x22, x23, [sp, #(8 * 22)] ldp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
ldp x24, x25, [sp, #(8 * 24)] ldp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
ldp x26, x27, [sp, #(8 * 26)] ldp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
ldp x28, x29, [sp, #(8 * 28)] ldp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
/* Return. */ /* Return. */
add sp, sp, #0x120 add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
eret eret
4: /* Return from SVC. */ 4: /* Return from SVC. */
/* Clear our in-SVC note. */ /* Clear our in-SVC note. */
strb wzr, [sp, #(0x120 + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)] strb wzr, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)]
/* If we should, trace the svc exit. */ /* If we should, trace the svc exit. */
#if defined(MESOSPHERE_BUILD_FOR_TRACING) #if defined(MESOSPHERE_BUILD_FOR_TRACING)
@ -199,10 +199,10 @@ _ZN3ams4kern4arch5arm6412SvcHandler64Ev:
#endif #endif
/* Restore registers. */ /* Restore registers. */
ldp x30, x8, [sp, #(8 * 30)] ldp x30, x8, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
ldp x9, x10, [sp, #(8 * 32)] ldp x9, x10, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
ldr x11, [sp, #(8 * 34)] ldr x11, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
ldr x18, [sp, #(8 * 18)] ldr x18, [sp, #(EXCEPTION_CONTEXT_X18)]
msr sp_el0, x8 msr sp_el0, x8
msr elr_el1, x9 msr elr_el1, x9
msr spsr_el1, x10 msr spsr_el1, x10
@ -221,7 +221,7 @@ _ZN3ams4kern4arch5arm6412SvcHandler64Ev:
mov x17, xzr mov x17, xzr
/* Return. */ /* Return. */
add sp, sp, #0x120 add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
eret eret
/* ams::kern::arch::arm64::SvcHandler32() */ /* ams::kern::arch::arm64::SvcHandler32() */
@ -240,36 +240,36 @@ _ZN3ams4kern4arch5arm6412SvcHandler32Ev:
mov w7, w7 mov w7, w7
/* Create a KExceptionContext for the exception. */ /* Create a KExceptionContext for the exception. */
sub sp, sp, #0x120 sub sp, sp, #(EXCEPTION_CONTEXT_SIZE)
/* Save system registers */ /* Save system registers */
mrs x17, elr_el1 mrs x17, elr_el1
mrs x20, spsr_el1 mrs x20, spsr_el1
mrs x19, tpidr_el0 mrs x19, tpidr_el0
ldr x18, [sp, #(0x120 + THREAD_STACK_PARAMETERS_CUR_THREAD)] ldr x18, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_CUR_THREAD)]
stp x17, x20, [sp, #(8 * 32)] stp x17, x20, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
str x19, [sp, #(8 * 34)] str x19, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
/* Save registers. */ /* Save registers. */
stp x0, x1, [sp, #(8 * 0)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x2, x3, [sp, #(8 * 2)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x4, x5, [sp, #(8 * 4)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x6, x7, [sp, #(8 * 6)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp x8, x9, [sp, #(8 * 8)] stp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
stp x10, x11, [sp, #(8 * 10)] stp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
stp x12, x13, [sp, #(8 * 12)] stp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp x14, xzr, [sp, #(8 * 14)] stp x14, xzr, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
/* Check if the SVC index is out of range. */ /* Check if the SVC index is out of range. */
mrs x16, esr_el1 mrs x16, esr_el1
and x16, x16, #0xFF and x16, x16, #0xFF
cmp x16, #0x80 cmp x16, #(AMS_KERN_NUM_SUPERVISOR_CALLS)
b.ge 3f b.ge 3f
/* Check the specific SVC permission bit for allowance. */ /* Check the specific SVC permission bit for allowance. */
mov x20, sp mov x20, sp
add x20, x20, x16, lsr#3 add x20, x20, x16, lsr#3
ldrb w20, [x20, #(0x120 + THREAD_STACK_PARAMETERS_SVC_PERMISSION)] ldrb w20, [x20, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_SVC_PERMISSION)]
and x17, x16, #0x7 and x17, x16, #0x7
lsr x17, x20, x17 lsr x17, x20, x17
tst x17, #1 tst x17, #1
@ -277,11 +277,11 @@ _ZN3ams4kern4arch5arm6412SvcHandler32Ev:
/* Check if our disable count allows us to call SVCs. */ /* Check if our disable count allows us to call SVCs. */
mrs x15, tpidrro_el0 mrs x15, tpidrro_el0
ldrh w15, [x15, #0x100] ldrh w15, [x15, #(THREAD_LOCAL_REGION_DISABLE_COUNT)]
cbz w15, 1f cbz w15, 1f
/* It might not, so check the stack params to see if we must not allow the SVC. */ /* It might not, so check the stack params to see if we must not allow the SVC. */
ldrb w15, [sp, #(0x120 + THREAD_STACK_PARAMETERS_IS_PINNED)] ldrb w15, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_IS_PINNED)]
cbz w15, 3f cbz w15, 3f
1: /* We can call the SVC. */ 1: /* We can call the SVC. */
@ -291,8 +291,8 @@ _ZN3ams4kern4arch5arm6412SvcHandler32Ev:
/* Note that we're calling the SVC. */ /* Note that we're calling the SVC. */
mov w15, #1 mov w15, #1
strb w15, [sp, #(0x120 + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)] strb w15, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)]
strb w16, [sp, #(0x120 + THREAD_STACK_PARAMETERS_CURRENT_SVC_ID)] strb w16, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_CURRENT_SVC_ID)]
/* If we should, trace the svc entry. */ /* If we should, trace the svc entry. */
#if defined(MESOSPHERE_BUILD_FOR_TRACING) #if defined(MESOSPHERE_BUILD_FOR_TRACING)
@ -319,7 +319,7 @@ _ZN3ams4kern4arch5arm6412SvcHandler32Ev:
2: /* We completed the SVC, and we should handle DPC. */ 2: /* We completed the SVC, and we should handle DPC. */
/* Check the dpc flags. */ /* Check the dpc flags. */
ldrb w16, [sp, #(0x120 + THREAD_STACK_PARAMETERS_DPC_FLAGS)] ldrb w16, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_DPC_FLAGS)]
cbz w16, 4f cbz w16, 4f
/* We have DPC to do! */ /* We have DPC to do! */
@ -339,45 +339,45 @@ _ZN3ams4kern4arch5arm6412SvcHandler32Ev:
3: /* Invalid SVC. */ 3: /* Invalid SVC. */
/* Setup the context to call into HandleException. */ /* Setup the context to call into HandleException. */
stp x0, x1, [sp, #(8 * 0)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x2, x3, [sp, #(8 * 2)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x4, x5, [sp, #(8 * 4)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x6, x7, [sp, #(8 * 6)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp xzr, xzr, [sp, #(8 * 16)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
stp xzr, xzr, [sp, #(8 * 18)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
stp xzr, xzr, [sp, #(8 * 20)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
stp xzr, xzr, [sp, #(8 * 22)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
stp xzr, xzr, [sp, #(8 * 24)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
stp xzr, xzr, [sp, #(8 * 26)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
stp xzr, xzr, [sp, #(8 * 28)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
stp xzr, xzr, [sp, #(8 * 30)] stp xzr, xzr, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
/* Call ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *) */ /* Call ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *) */
mov x0, sp mov x0, sp
bl _ZN3ams4kern4arch5arm6415HandleExceptionEPNS2_17KExceptionContextE bl _ZN3ams4kern4arch5arm6415HandleExceptionEPNS2_17KExceptionContextE
/* Restore registers. */ /* Restore registers. */
ldp x17, x20, [sp, #(8 * 32)] ldp x17, x20, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
ldr x19, [sp, #(8 * 34)] ldr x19, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
msr elr_el1, x17 msr elr_el1, x17
msr spsr_el1, x20 msr spsr_el1, x20
msr tpidr_el0, x19 msr tpidr_el0, x19
ldp x0, x1, [sp, #(8 * 0)] ldp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
ldp x2, x3, [sp, #(8 * 2)] ldp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
ldp x4, x5, [sp, #(8 * 4)] ldp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
ldp x6, x7, [sp, #(8 * 6)] ldp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
ldp x8, x9, [sp, #(8 * 8)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x10, x11, [sp, #(8 * 10)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x12, x13, [sp, #(8 * 12)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x14, x15, [sp, #(8 * 14)] ldp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
/* Return. */ /* Return. */
add sp, sp, #0x120 add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
eret eret
4: /* Return from SVC. */ 4: /* Return from SVC. */
/* Clear our in-SVC note. */ /* Clear our in-SVC note. */
strb wzr, [sp, #(0x120 + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)] strb wzr, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_IS_CALLING_SVC)]
/* If we should, trace the svc exit. */ /* If we should, trace the svc exit. */
#if defined(MESOSPHERE_BUILD_FOR_TRACING) #if defined(MESOSPHERE_BUILD_FOR_TRACING)
@ -396,16 +396,16 @@ _ZN3ams4kern4arch5arm6412SvcHandler32Ev:
#endif #endif
/* Restore registers. */ /* Restore registers. */
ldp x8, x9, [sp, #(8 * 8)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x10, x11, [sp, #(8 * 10)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x12, x13, [sp, #(8 * 12)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x14, xzr, [sp, #(8 * 14)] ldp x14, xzr, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
ldp x17, x20, [sp, #(8 * 32)] ldp x17, x20, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
ldr x19, [sp, #(8 * 34)] ldr x19, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
msr elr_el1, x17 msr elr_el1, x17
msr spsr_el1, x20 msr spsr_el1, x20
msr tpidr_el0, x19 msr tpidr_el0, x19
/* Return. */ /* Return. */
add sp, sp, #0x120 add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
eret eret

View file

@ -14,7 +14,6 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>. * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
/* For some reason GAS doesn't know about it, even with .cpu cortex-a57 */ /* For some reason GAS doesn't know about it, even with .cpu cortex-a57 */
#define cpuactlr_el1 s3_1_c15_c2_0 #define cpuactlr_el1 s3_1_c15_c2_0
#define cpuectlr_el1 s3_1_c15_c2_1 #define cpuectlr_el1 s3_1_c15_c2_1

View file

@ -13,6 +13,7 @@
* You should have received a copy of the GNU General Public License * You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>. * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
#include <mesosphere/kern_select_assembly_offsets.h>
/* For some reason GAS doesn't know about it, even with .cpu cortex-a57 */ /* For some reason GAS doesn't know about it, even with .cpu cortex-a57 */
#define cpuactlr_el1 s3_1_c15_c2_0 #define cpuactlr_el1 s3_1_c15_c2_0
@ -157,13 +158,13 @@ othercore_el1:
bl _ZN3ams4kern4init19DisableMmuAndCachesEv bl _ZN3ams4kern4init19DisableMmuAndCachesEv
/* Setup system registers using values from our KInitArguments. */ /* Setup system registers using values from our KInitArguments. */
ldr x1, [x20, #0x00] ldr x1, [x20, #(INIT_ARGUMENTS_TTBR0)]
msr ttbr0_el1, x1 msr ttbr0_el1, x1
ldr x1, [x20, #0x08] ldr x1, [x20, #(INIT_ARGUMENTS_TTBR1)]
msr ttbr1_el1, x1 msr ttbr1_el1, x1
ldr x1, [x20, #0x10] ldr x1, [x20, #(INIT_ARGUMENTS_TCR)]
msr tcr_el1, x1 msr tcr_el1, x1
ldr x1, [x20, #0x18] ldr x1, [x20, #(INIT_ARGUMENTS_MAIR)]
msr mair_el1, x1 msr mair_el1, x1
/* Perform cpu-specific setup. */ /* Perform cpu-specific setup. */
@ -179,9 +180,9 @@ othercore_el1:
b othercore_cpu_specific_setup_end b othercore_cpu_specific_setup_end
othercore_cpu_specific_setup_cortex_a57: othercore_cpu_specific_setup_cortex_a57:
othercore_cpu_specific_setup_cortex_a53: othercore_cpu_specific_setup_cortex_a53:
ldr x1, [x20, #0x20] ldr x1, [x20, #(INIT_ARGUMENTS_CPUACTLR)]
msr cpuactlr_el1, x1 msr cpuactlr_el1, x1
ldr x1, [x20, #0x28] ldr x1, [x20, #(INIT_ARGUMENTS_CPUECTLR)]
msr cpuectlr_el1, x1 msr cpuectlr_el1, x1
othercore_cpu_specific_setup_end: othercore_cpu_specific_setup_end:
@ -190,14 +191,14 @@ othercore_cpu_specific_setup_end:
isb isb
/* Set sctlr_el1 and ensure instruction consistency. */ /* Set sctlr_el1 and ensure instruction consistency. */
ldr x1, [x20, #0x30] ldr x1, [x20, #(INIT_ARGUMENTS_SCTLR)]
msr sctlr_el1, x1 msr sctlr_el1, x1
dsb sy dsb sy
isb isb
/* Jump to the virtual address equivalent to ams::kern::init::InvokeEntrypoint */ /* Jump to the virtual address equivalent to ams::kern::init::InvokeEntrypoint */
ldr x1, [x20, #0x50] ldr x1, [x20, #(INIT_ARGUMENTS_SETUP_FUNCTION)]
adr x2, _ZN3ams4kern4init14StartOtherCoreEPKNS1_14KInitArgumentsE adr x2, _ZN3ams4kern4init14StartOtherCoreEPKNS1_14KInitArgumentsE
sub x1, x1, x2 sub x1, x1, x2
adr x2, _ZN3ams4kern4init16InvokeEntrypointEPKNS1_14KInitArgumentsE adr x2, _ZN3ams4kern4init16InvokeEntrypointEPKNS1_14KInitArgumentsE
@ -218,7 +219,7 @@ _ZN3ams4kern4init16InvokeEntrypointEPKNS1_14KInitArgumentsE:
isb isb
/* Setup the stack pointer. */ /* Setup the stack pointer. */
ldr x1, [x20, #0x38] ldr x1, [x20, #(INIT_ARGUMENTS_SP)]
mov sp, x1 mov sp, x1
/* Ensure that system debug registers are setup. */ /* Ensure that system debug registers are setup. */
@ -228,12 +229,12 @@ _ZN3ams4kern4init16InvokeEntrypointEPKNS1_14KInitArgumentsE:
bl _ZN3ams4kern4init26InitializeExceptionVectorsEv bl _ZN3ams4kern4init26InitializeExceptionVectorsEv
/* Setup the exception stack in cntv_cval_el0. */ /* Setup the exception stack in cntv_cval_el0. */
ldr x1, [x20, #0x58] ldr x1, [x20, #(INIT_ARGUMENTS_EXCEPTION_STACK)]
msr cntv_cval_el0, x1 msr cntv_cval_el0, x1
/* Jump to the entrypoint. */ /* Jump to the entrypoint. */
ldr x1, [x20, #0x40] ldr x1, [x20, #(INIT_ARGUMENTS_ENTRYPOINT)]
ldr x0, [x20, #0x48] ldr x0, [x20, #(INIT_ARGUMENTS_ARGUMENT)]
br x1 br x1

View file

@ -72,62 +72,66 @@ _ZN3ams4kern4arch5arm6422EL1IrqExceptionHandlerEv:
.type _ZN3ams4kern4arch5arm6422EL0IrqExceptionHandlerEv, %function .type _ZN3ams4kern4arch5arm6422EL0IrqExceptionHandlerEv, %function
_ZN3ams4kern4arch5arm6422EL0IrqExceptionHandlerEv: _ZN3ams4kern4arch5arm6422EL0IrqExceptionHandlerEv:
/* Save registers that need saving. */ /* Save registers that need saving. */
sub sp, sp, #0x120 sub sp, sp, #(EXCEPTION_CONTEXT_SIZE)
stp x0, x1, [sp, #(8 * 0)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x2, x3, [sp, #(8 * 2)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x4, x5, [sp, #(8 * 4)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x6, x7, [sp, #(8 * 6)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp x8, x9, [sp, #(8 * 8)] stp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
stp x10, x11, [sp, #(8 * 10)] stp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
stp x12, x13, [sp, #(8 * 12)] stp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp x14, x15, [sp, #(8 * 14)] stp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
stp x16, x17, [sp, #(8 * 16)] stp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
stp x18, x19, [sp, #(8 * 18)] stp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
stp x20, x21, [sp, #(8 * 20)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
stp x22, x23, [sp, #(8 * 22)] stp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
stp x24, x25, [sp, #(8 * 24)] stp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
stp x26, x27, [sp, #(8 * 26)] stp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
stp x28, x29, [sp, #(8 * 28)] stp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
mrs x20, sp_el0 mrs x20, sp_el0
mrs x21, elr_el1 mrs x21, elr_el1
mrs x22, spsr_el1 mrs x22, spsr_el1
mrs x23, tpidr_el0 mrs x23, tpidr_el0
mov w22, w22 mov w22, w22
stp x30, x20, [sp, #(8 * 30)]
stp x21, x22, [sp, #(8 * 32)] stp x30, x20, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
str x23, [sp, #(8 * 34)] stp x21, x22, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
str x23, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
/* Invoke KInterruptManager::HandleInterrupt(bool user_mode). */ /* Invoke KInterruptManager::HandleInterrupt(bool user_mode). */
ldr x18, [sp, #(0x120 + THREAD_STACK_PARAMETERS_CUR_THREAD)] ldr x18, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_CUR_THREAD)]
mov x0, #1 mov x0, #1
bl _ZN3ams4kern4arch5arm6417KInterruptManager15HandleInterruptEb bl _ZN3ams4kern4arch5arm6417KInterruptManager15HandleInterruptEb
/* Restore state from the context. */ /* Restore state from the context. */
ldp x30, x20, [sp, #(8 * 30)] ldp x30, x20, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
ldp x21, x22, [sp, #(8 * 32)] ldp x21, x22, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
ldr x23, [sp, #(8 * 34)] ldr x23, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
msr sp_el0, x20 msr sp_el0, x20
msr elr_el1, x21 msr elr_el1, x21
msr spsr_el1, x22 msr spsr_el1, x22
msr tpidr_el0, x23 msr tpidr_el0, x23
ldp x0, x1, [sp, #(8 * 0)]
ldp x2, x3, [sp, #(8 * 2)] ldp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
ldp x4, x5, [sp, #(8 * 4)] ldp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
ldp x6, x7, [sp, #(8 * 6)] ldp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
ldp x8, x9, [sp, #(8 * 8)] ldp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
ldp x10, x11, [sp, #(8 * 10)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x12, x13, [sp, #(8 * 12)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x14, x15, [sp, #(8 * 14)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x16, x17, [sp, #(8 * 16)] ldp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
ldp x18, x19, [sp, #(8 * 18)] ldp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
ldp x20, x21, [sp, #(8 * 20)] ldp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
ldp x22, x23, [sp, #(8 * 22)] ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
ldp x24, x25, [sp, #(8 * 24)] ldp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
ldp x26, x27, [sp, #(8 * 26)] ldp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
ldp x28, x29, [sp, #(8 * 28)] ldp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
add sp, sp, #0x120 ldp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
/* Return from the exception. */ /* Return from the exception. */
eret eret
@ -171,60 +175,65 @@ _ZN3ams4kern4arch5arm6430EL0SynchronousExceptionHandlerEv:
ldp x16, x17, [sp], 16 ldp x16, x17, [sp], 16
/* Create a KExceptionContext to pass to HandleException. */ /* Create a KExceptionContext to pass to HandleException. */
sub sp, sp, #0x120 sub sp, sp, #(EXCEPTION_CONTEXT_SIZE)
stp x0, x1, [sp, #(8 * 0)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x2, x3, [sp, #(8 * 2)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x4, x5, [sp, #(8 * 4)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x6, x7, [sp, #(8 * 6)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp x8, x9, [sp, #(8 * 8)] stp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
stp x10, x11, [sp, #(8 * 10)] stp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
stp x12, x13, [sp, #(8 * 12)] stp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp x14, x15, [sp, #(8 * 14)] stp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
stp x16, x17, [sp, #(8 * 16)] stp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
stp x18, x19, [sp, #(8 * 18)] stp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
stp x20, x21, [sp, #(8 * 20)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
stp x22, x23, [sp, #(8 * 22)] stp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
stp x24, x25, [sp, #(8 * 24)] stp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
stp x26, x27, [sp, #(8 * 26)] stp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
stp x28, x29, [sp, #(8 * 28)] stp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
mrs x20, sp_el0 mrs x20, sp_el0
mrs x21, elr_el1 mrs x21, elr_el1
mrs x22, spsr_el1 mrs x22, spsr_el1
mrs x23, tpidr_el0 mrs x23, tpidr_el0
mov w22, w22 mov w22, w22
stp x30, x20, [sp, #(8 * 30)]
stp x21, x22, [sp, #(8 * 32)] stp x30, x20, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
str x23, [sp, #(8 * 34)] stp x21, x22, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
str x23, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
/* Call ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *) */ /* Call ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *) */
ldr x18, [sp, #(0x120 + THREAD_STACK_PARAMETERS_CUR_THREAD)] ldr x18, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_CUR_THREAD)]
mov x0, sp mov x0, sp
bl _ZN3ams4kern4arch5arm6415HandleExceptionEPNS2_17KExceptionContextE bl _ZN3ams4kern4arch5arm6415HandleExceptionEPNS2_17KExceptionContextE
/* Restore state from the context. */ /* Restore state from the context. */
ldp x30, x20, [sp, #(8 * 30)] ldp x30, x20, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
ldp x21, x22, [sp, #(8 * 32)] ldp x21, x22, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
ldr x23, [sp, #(8 * 34)] ldr x23, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
msr sp_el0, x20 msr sp_el0, x20
msr elr_el1, x21 msr elr_el1, x21
msr spsr_el1, x22 msr spsr_el1, x22
msr tpidr_el0, x23 msr tpidr_el0, x23
ldp x0, x1, [sp, #(8 * 0)]
ldp x2, x3, [sp, #(8 * 2)] ldp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
ldp x4, x5, [sp, #(8 * 4)] ldp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
ldp x6, x7, [sp, #(8 * 6)] ldp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
ldp x8, x9, [sp, #(8 * 8)] ldp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
ldp x10, x11, [sp, #(8 * 10)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x12, x13, [sp, #(8 * 12)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x14, x15, [sp, #(8 * 14)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x16, x17, [sp, #(8 * 16)] ldp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
ldp x18, x19, [sp, #(8 * 18)] ldp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
ldp x20, x21, [sp, #(8 * 20)] ldp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
ldp x22, x23, [sp, #(8 * 22)] ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
ldp x24, x25, [sp, #(8 * 24)] ldp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
ldp x26, x27, [sp, #(8 * 26)] ldp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
ldp x28, x29, [sp, #(8 * 28)] ldp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
add sp, sp, #0x120 ldp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
/* Return from the exception. */ /* Return from the exception. */
eret eret
@ -344,30 +353,33 @@ _ZN3ams4kern4arch5arm6430EL1SynchronousExceptionHandlerEv:
ldr x1, [sp, #16] ldr x1, [sp, #16]
/* Create a KExceptionContext to pass to HandleException. */ /* Create a KExceptionContext to pass to HandleException. */
sub sp, sp, #0x120 sub sp, sp, #(EXCEPTION_CONTEXT_SIZE)
stp x0, x1, [sp, #(8 * 0)]
stp x2, x3, [sp, #(8 * 2)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x4, x5, [sp, #(8 * 4)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x6, x7, [sp, #(8 * 6)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x8, x9, [sp, #(8 * 8)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp x10, x11, [sp, #(8 * 10)] stp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
stp x12, x13, [sp, #(8 * 12)] stp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
stp x14, x15, [sp, #(8 * 14)] stp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp x16, x17, [sp, #(8 * 16)] stp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
stp x18, x19, [sp, #(8 * 18)] stp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
stp x20, x21, [sp, #(8 * 20)] stp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
stp x22, x23, [sp, #(8 * 22)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
stp x24, x25, [sp, #(8 * 24)] stp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
stp x26, x27, [sp, #(8 * 26)] stp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
stp x28, x29, [sp, #(8 * 28)] stp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
stp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
mrs x20, sp_el0 mrs x20, sp_el0
mrs x21, elr_el1 mrs x21, elr_el1
mrs x22, spsr_el1 mrs x22, spsr_el1
mrs x23, tpidr_el0 mrs x23, tpidr_el0
mov w22, w22 mov w22, w22
stp x30, x20, [sp, #(8 * 30)]
stp x21, x22, [sp, #(8 * 32)] stp x30, x20, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
str x23, [sp, #(8 * 34)] stp x21, x22, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
str x23, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
/* Call ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *) */ /* Call ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *) */
mov x0, sp mov x0, sp
@ -421,53 +433,53 @@ _ZN3ams4kern4arch5arm6430EL1SynchronousExceptionHandlerEv:
.type _ZN3ams4kern4arch5arm6425FpuAccessExceptionHandlerEv, %function .type _ZN3ams4kern4arch5arm6425FpuAccessExceptionHandlerEv, %function
_ZN3ams4kern4arch5arm6425FpuAccessExceptionHandlerEv: _ZN3ams4kern4arch5arm6425FpuAccessExceptionHandlerEv:
/* Save registers that need saving. */ /* Save registers that need saving. */
sub sp, sp, #0x120 sub sp, sp, #(EXCEPTION_CONTEXT_SIZE)
stp x0, x1, [sp, #(8 * 0)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x2, x3, [sp, #(8 * 2)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x4, x5, [sp, #(8 * 4)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x6, x7, [sp, #(8 * 6)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp x8, x9, [sp, #(8 * 8)] stp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
stp x10, x11, [sp, #(8 * 10)] stp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
stp x12, x13, [sp, #(8 * 12)] stp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp x14, x15, [sp, #(8 * 14)] stp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
stp x16, x17, [sp, #(8 * 16)] stp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
stp x18, x19, [sp, #(8 * 18)] stp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
stp x20, x21, [sp, #(8 * 20)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
mrs x19, sp_el0 mrs x19, sp_el0
mrs x20, elr_el1 mrs x20, elr_el1
mrs x21, spsr_el1 mrs x21, spsr_el1
mov w21, w21 mov w21, w21
stp x30, x19, [sp, #(8 * 30)] stp x30, x19, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
stp x20, x21, [sp, #(8 * 32)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
/* Invoke the FPU context switch handler. */ /* Invoke the FPU context switch handler. */
ldr x18, [sp, #(0x120 + THREAD_STACK_PARAMETERS_CUR_THREAD)] ldr x18, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_CUR_THREAD)]
bl _ZN3ams4kern4arch5arm6423FpuContextSwitchHandlerEv bl _ZN3ams4kern4arch5arm6423FpuContextSwitchHandlerEv
/* Restore registers that we saved. */ /* Restore registers that we saved. */
ldp x30, x19, [sp, #(8 * 30)] ldp x30, x19, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
ldp x20, x21, [sp, #(8 * 32)] ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
msr sp_el0, x19 msr sp_el0, x19
msr elr_el1, x20 msr elr_el1, x20
msr spsr_el1, x21 msr spsr_el1, x21
ldp x0, x1, [sp, #(8 * 0)] ldp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
ldp x2, x3, [sp, #(8 * 2)] ldp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
ldp x4, x5, [sp, #(8 * 4)] ldp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
ldp x6, x7, [sp, #(8 * 6)] ldp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
ldp x8, x9, [sp, #(8 * 8)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x10, x11, [sp, #(8 * 10)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x12, x13, [sp, #(8 * 12)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x14, x15, [sp, #(8 * 14)] ldp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
ldp x16, x17, [sp, #(8 * 16)] ldp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
ldp x18, x19, [sp, #(8 * 18)] ldp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
ldp x20, x21, [sp, #(8 * 20)] ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
add sp, sp, #0x120 add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
/* Return from the exception. */ /* Return from the exception. */
eret eret
@ -495,30 +507,32 @@ _ZN3ams4kern4arch5arm6421EL1SystemErrorHandlerEv:
mrs x0, tpidr_el1 mrs x0, tpidr_el1
/* Create a KExceptionContext to pass to HandleException. */ /* Create a KExceptionContext to pass to HandleException. */
sub sp, sp, #0x120 sub sp, sp, #(EXCEPTION_CONTEXT_SIZE)
stp x0, x1, [sp, #(8 * 0)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x2, x3, [sp, #(8 * 2)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x4, x5, [sp, #(8 * 4)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x6, x7, [sp, #(8 * 6)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp x8, x9, [sp, #(8 * 8)] stp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
stp x10, x11, [sp, #(8 * 10)] stp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
stp x12, x13, [sp, #(8 * 12)] stp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp x14, x15, [sp, #(8 * 14)] stp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
stp x16, x17, [sp, #(8 * 16)] stp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
stp x18, x19, [sp, #(8 * 18)] stp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
stp x20, x21, [sp, #(8 * 20)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
stp x22, x23, [sp, #(8 * 22)] stp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
stp x24, x25, [sp, #(8 * 24)] stp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
stp x26, x27, [sp, #(8 * 26)] stp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
stp x28, x29, [sp, #(8 * 28)] stp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
mrs x20, sp_el0 mrs x20, sp_el0
mrs x21, elr_el1 mrs x21, elr_el1
mrs x22, spsr_el1 mrs x22, spsr_el1
mrs x23, tpidr_el0 mrs x23, tpidr_el0
mov w22, w22 mov w22, w22
stp x30, x20, [sp, #(8 * 30)]
stp x21, x22, [sp, #(8 * 32)] stp x30, x20, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
str x23, [sp, #(8 * 34)] stp x21, x22, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
str x23, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
/* Invoke ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *). */ /* Invoke ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *). */
mov x0, sp mov x0, sp
@ -533,60 +547,66 @@ _ZN3ams4kern4arch5arm6421EL1SystemErrorHandlerEv:
.type _ZN3ams4kern4arch5arm6421EL0SystemErrorHandlerEv, %function .type _ZN3ams4kern4arch5arm6421EL0SystemErrorHandlerEv, %function
_ZN3ams4kern4arch5arm6421EL0SystemErrorHandlerEv: _ZN3ams4kern4arch5arm6421EL0SystemErrorHandlerEv:
/* Create a KExceptionContext to pass to HandleException. */ /* Create a KExceptionContext to pass to HandleException. */
sub sp, sp, #0x120 sub sp, sp, #(EXCEPTION_CONTEXT_SIZE)
stp x0, x1, [sp, #(8 * 0)]
stp x2, x3, [sp, #(8 * 2)] stp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
stp x4, x5, [sp, #(8 * 4)] stp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
stp x6, x7, [sp, #(8 * 6)] stp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
stp x8, x9, [sp, #(8 * 8)] stp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
stp x10, x11, [sp, #(8 * 10)] stp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
stp x12, x13, [sp, #(8 * 12)] stp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
stp x14, x15, [sp, #(8 * 14)] stp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
stp x16, x17, [sp, #(8 * 16)] stp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
stp x18, x19, [sp, #(8 * 18)] stp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
stp x20, x21, [sp, #(8 * 20)] stp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
stp x22, x23, [sp, #(8 * 22)] stp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
stp x24, x25, [sp, #(8 * 24)] stp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
stp x26, x27, [sp, #(8 * 26)] stp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
stp x28, x29, [sp, #(8 * 28)] stp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
stp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
mrs x20, sp_el0 mrs x20, sp_el0
mrs x21, elr_el1 mrs x21, elr_el1
mrs x22, spsr_el1 mrs x22, spsr_el1
mrs x23, tpidr_el0 mrs x23, tpidr_el0
mov w22, w22 mov w22, w22
stp x30, x20, [sp, #(8 * 30)]
stp x21, x22, [sp, #(8 * 32)] stp x30, x20, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
str x23, [sp, #(8 * 34)] stp x21, x22, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
str x23, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
/* Invoke ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *). */ /* Invoke ams::kern::arch::arm64::HandleException(ams::kern::arch::arm64::KExceptionContext *). */
ldr x18, [sp, #(0x120 + THREAD_STACK_PARAMETERS_CUR_THREAD)] ldr x18, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_CUR_THREAD)]
mov x0, sp mov x0, sp
bl _ZN3ams4kern4arch5arm6415HandleExceptionEPNS2_17KExceptionContextE bl _ZN3ams4kern4arch5arm6415HandleExceptionEPNS2_17KExceptionContextE
/* Restore state from the context. */ /* Restore state from the context. */
ldp x30, x20, [sp, #(8 * 30)] ldp x30, x20, [sp, #(EXCEPTION_CONTEXT_X30_SP)]
ldp x21, x22, [sp, #(8 * 32)] ldp x21, x22, [sp, #(EXCEPTION_CONTEXT_PC_PSR)]
ldr x23, [sp, #(8 * 34)] ldr x23, [sp, #(EXCEPTION_CONTEXT_TPIDR)]
msr sp_el0, x20 msr sp_el0, x20
msr elr_el1, x21 msr elr_el1, x21
msr spsr_el1, x22 msr spsr_el1, x22
msr tpidr_el0, x23 msr tpidr_el0, x23
ldp x0, x1, [sp, #(8 * 0)]
ldp x2, x3, [sp, #(8 * 2)] ldp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
ldp x4, x5, [sp, #(8 * 4)] ldp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
ldp x6, x7, [sp, #(8 * 6)] ldp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
ldp x8, x9, [sp, #(8 * 8)] ldp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
ldp x10, x11, [sp, #(8 * 10)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x12, x13, [sp, #(8 * 12)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x14, x15, [sp, #(8 * 14)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x16, x17, [sp, #(8 * 16)] ldp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
ldp x18, x19, [sp, #(8 * 18)] ldp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
ldp x20, x21, [sp, #(8 * 20)] ldp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
ldp x22, x23, [sp, #(8 * 22)] ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
ldp x24, x25, [sp, #(8 * 24)] ldp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
ldp x26, x27, [sp, #(8 * 26)] ldp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
ldp x28, x29, [sp, #(8 * 28)] ldp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
add sp, sp, #0x120 ldp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
/* Return from the exception. */ /* Return from the exception. */
eret eret

View file

@ -15,88 +15,88 @@
*/ */
#include <mesosphere/kern_select_assembly_offsets.h> #include <mesosphere/kern_select_assembly_offsets.h>
#define SAVE_THREAD_CONTEXT(ctx, tmp0, tmp1, done_label) \ #define SAVE_THREAD_CONTEXT(ctx, tmp0, tmp1, done_label) \
/* Save the callee save registers + SP and cpacr. */ \ /* Save the callee save registers + SP and cpacr. */ \
mov tmp0, sp; \ mov tmp0, sp; \
mrs tmp1, cpacr_el1; \ mrs tmp1, cpacr_el1; \
stp x19, x20, [ctx, #(8 * 0)]; \ stp x19, x20, [ctx, #(THREAD_CONTEXT_X19_X20)]; \
stp x21, x22, [ctx, #(8 * 2)]; \ stp x21, x22, [ctx, #(THREAD_CONTEXT_X21_X22)]; \
stp x23, x24, [ctx, #(8 * 4)]; \ stp x23, x24, [ctx, #(THREAD_CONTEXT_X23_X24)]; \
stp x25, x26, [ctx, #(8 * 6)]; \ stp x25, x26, [ctx, #(THREAD_CONTEXT_X25_X26)]; \
stp x27, x28, [ctx, #(8 * 8)]; \ stp x27, x28, [ctx, #(THREAD_CONTEXT_X27_X28)]; \
stp x29, x30, [ctx, #(8 * 10)]; \ stp x29, x30, [ctx, #(THREAD_CONTEXT_X29_X30)]; \
\ \
stp tmp0, tmp1, [ctx, #0x60]; \ stp tmp0, tmp1, [ctx, #(THREAD_CONTEXT_SP_CPACR)]; \
\ \
/* Check whether the FPU is enabled. */ \ /* Check whether the FPU is enabled. */ \
/* If it isn't, skip saving FPU state. */ \ /* If it isn't, skip saving FPU state. */ \
and tmp1, tmp1, #0x300000; \ and tmp1, tmp1, #0x300000; \
cbz tmp1, done_label; \ cbz tmp1, done_label; \
\ \
/* Save fpcr and fpsr. */ \ /* Save fpcr and fpsr. */ \
mrs tmp0, fpcr; \ mrs tmp0, fpcr; \
mrs tmp1, fpsr; \ mrs tmp1, fpsr; \
stp tmp0, tmp1, [ctx, #0x70]; \ stp tmp0, tmp1, [ctx, #(THREAD_CONTEXT_FPCR_FPSR)]; \
\ \
/* Save the FPU registers. */ \ /* Save the FPU registers. */ \
stp q0, q1, [ctx, #(16 * 0 + 0x80)]; \ stp q0, q1, [ctx, #(16 * 0 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q2, q3, [ctx, #(16 * 2 + 0x80)]; \ stp q2, q3, [ctx, #(16 * 2 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q4, q5, [ctx, #(16 * 4 + 0x80)]; \ stp q4, q5, [ctx, #(16 * 4 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q6, q7, [ctx, #(16 * 6 + 0x80)]; \ stp q6, q7, [ctx, #(16 * 6 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q8, q9, [ctx, #(16 * 8 + 0x80)]; \ stp q8, q9, [ctx, #(16 * 8 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q10, q11, [ctx, #(16 * 10 + 0x80)]; \ stp q10, q11, [ctx, #(16 * 10 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q12, q13, [ctx, #(16 * 12 + 0x80)]; \ stp q12, q13, [ctx, #(16 * 12 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q14, q15, [ctx, #(16 * 14 + 0x80)]; \ stp q14, q15, [ctx, #(16 * 14 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q16, q17, [ctx, #(16 * 16 + 0x80)]; \ stp q16, q17, [ctx, #(16 * 16 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q18, q19, [ctx, #(16 * 18 + 0x80)]; \ stp q18, q19, [ctx, #(16 * 18 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q20, q21, [ctx, #(16 * 20 + 0x80)]; \ stp q20, q21, [ctx, #(16 * 20 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q22, q23, [ctx, #(16 * 22 + 0x80)]; \ stp q22, q23, [ctx, #(16 * 22 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q24, q25, [ctx, #(16 * 24 + 0x80)]; \ stp q24, q25, [ctx, #(16 * 24 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q26, q27, [ctx, #(16 * 26 + 0x80)]; \ stp q26, q27, [ctx, #(16 * 26 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q28, q29, [ctx, #(16 * 28 + 0x80)]; \ stp q28, q29, [ctx, #(16 * 28 + THREAD_CONTEXT_FPU_REGISTERS)]; \
stp q30, q31, [ctx, #(16 * 30 + 0x80)]; stp q30, q31, [ctx, #(16 * 30 + THREAD_CONTEXT_FPU_REGISTERS)];
#define RESTORE_THREAD_CONTEXT(ctx, tmp0, tmp1, done_label) \ #define RESTORE_THREAD_CONTEXT(ctx, tmp0, tmp1, done_label) \
/* Restore the callee save registers + SP and cpacr. */ \ /* Restore the callee save registers + SP and cpacr. */ \
ldp tmp0, tmp1, [ctx, #0x60]; \ ldp tmp0, tmp1, [ctx, #(THREAD_CONTEXT_SP_CPACR)]; \
mov sp, tmp0; \ mov sp, tmp0; \
ldp x19, x20, [ctx, #(8 * 0)]; \ ldp x19, x20, [ctx, #(THREAD_CONTEXT_X19_X20)]; \
ldp x21, x22, [ctx, #(8 * 2)]; \ ldp x21, x22, [ctx, #(THREAD_CONTEXT_X21_X22)]; \
ldp x23, x24, [ctx, #(8 * 4)]; \ ldp x23, x24, [ctx, #(THREAD_CONTEXT_X23_X24)]; \
ldp x25, x26, [ctx, #(8 * 6)]; \ ldp x25, x26, [ctx, #(THREAD_CONTEXT_X25_X26)]; \
ldp x27, x28, [ctx, #(8 * 8)]; \ ldp x27, x28, [ctx, #(THREAD_CONTEXT_X27_X28)]; \
ldp x29, x30, [ctx, #(8 * 10)]; \ ldp x29, x30, [ctx, #(THREAD_CONTEXT_X29_X30)]; \
\ \
msr cpacr_el1, tmp1; \ msr cpacr_el1, tmp1; \
isb; \ isb; \
\ \
/* Check whether the FPU is enabled. */ \ /* Check whether the FPU is enabled. */ \
/* If it isn't, skip saving FPU state. */ \ /* If it isn't, skip saving FPU state. */ \
and tmp1, tmp1, #0x300000; \ and tmp1, tmp1, #0x300000; \
cbz tmp1, done_label; \ cbz tmp1, done_label; \
\ \
/* Save fpcr and fpsr. */ \ /* Save fpcr and fpsr. */ \
ldp tmp0, tmp1, [ctx, #0x70]; \ ldp tmp0, tmp1, [ctx, #(THREAD_CONTEXT_FPCR_FPSR)]; \
msr fpcr, tmp0; \ msr fpcr, tmp0; \
msr fpsr, tmp1; \ msr fpsr, tmp1; \
\ \
/* Save the FPU registers. */ \ /* Save the FPU registers. */ \
ldp q0, q1, [ctx, #(16 * 0 + 0x80)]; \ ldp q0, q1, [ctx, #(16 * 0 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q2, q3, [ctx, #(16 * 2 + 0x80)]; \ ldp q2, q3, [ctx, #(16 * 2 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q4, q5, [ctx, #(16 * 4 + 0x80)]; \ ldp q4, q5, [ctx, #(16 * 4 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q6, q7, [ctx, #(16 * 6 + 0x80)]; \ ldp q6, q7, [ctx, #(16 * 6 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q8, q9, [ctx, #(16 * 8 + 0x80)]; \ ldp q8, q9, [ctx, #(16 * 8 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q10, q11, [ctx, #(16 * 10 + 0x80)]; \ ldp q10, q11, [ctx, #(16 * 10 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q12, q13, [ctx, #(16 * 12 + 0x80)]; \ ldp q12, q13, [ctx, #(16 * 12 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q14, q15, [ctx, #(16 * 14 + 0x80)]; \ ldp q14, q15, [ctx, #(16 * 14 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q16, q17, [ctx, #(16 * 16 + 0x80)]; \ ldp q16, q17, [ctx, #(16 * 16 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q18, q19, [ctx, #(16 * 18 + 0x80)]; \ ldp q18, q19, [ctx, #(16 * 18 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q20, q21, [ctx, #(16 * 20 + 0x80)]; \ ldp q20, q21, [ctx, #(16 * 20 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q22, q23, [ctx, #(16 * 22 + 0x80)]; \ ldp q22, q23, [ctx, #(16 * 22 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q24, q25, [ctx, #(16 * 24 + 0x80)]; \ ldp q24, q25, [ctx, #(16 * 24 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q26, q27, [ctx, #(16 * 26 + 0x80)]; \ ldp q26, q27, [ctx, #(16 * 26 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q28, q29, [ctx, #(16 * 28 + 0x80)]; \ ldp q28, q29, [ctx, #(16 * 28 + THREAD_CONTEXT_FPU_REGISTERS)]; \
ldp q30, q31, [ctx, #(16 * 30 + 0x80)]; ldp q30, q31, [ctx, #(16 * 30 + THREAD_CONTEXT_FPU_REGISTERS)];
/* ams::kern::KScheduler::ScheduleImpl() */ /* ams::kern::KScheduler::ScheduleImpl() */
@ -109,11 +109,11 @@
_ZN3ams4kern10KScheduler12ScheduleImplEv: _ZN3ams4kern10KScheduler12ScheduleImplEv:
/* Right now, x0 contains (this). We want x1 to point to the scheduling state, */ /* Right now, x0 contains (this). We want x1 to point to the scheduling state, */
/* Current KScheduler layout has state at +0x0. */ /* KScheduler layout has state at +0x0, this is guaranteed statically by assembly offsets. */
mov x1, x0 mov x1, x0
/* First thing we want to do is check whether the interrupt task thread is runnable. */ /* First thing we want to do is check whether the interrupt task thread is runnable. */
ldrb w3, [x1, #1] ldrb w3, [x1, #(KSCHEDULER_INTERRUPT_TASK_THREAD_RUNNABLE)]
cbz w3, 0f cbz w3, 0f
/* If it is, we want to call KScheduler::InterruptTaskThreadToRunnable() to change its state to runnable. */ /* If it is, we want to call KScheduler::InterruptTaskThreadToRunnable() to change its state to runnable. */
@ -124,7 +124,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
ldp x0, x1, [sp], 16 ldp x0, x1, [sp], 16
/* Clear the interrupt task thread as runnable. */ /* Clear the interrupt task thread as runnable. */
strb wzr, [x1, #1] strb wzr, [x1, #(KSCHEDULER_INTERRUPT_TASK_THREAD_RUNNABLE)]
0: /* Interrupt task thread runnable checked. */ 0: /* Interrupt task thread runnable checked. */
/* Now we want to check if there's any scheduling to do. */ /* Now we want to check if there's any scheduling to do. */
@ -135,7 +135,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
dmb ish dmb ish
/* Check if the highest priority thread is the same as the current thread. */ /* Check if the highest priority thread is the same as the current thread. */
ldr x7, [x1, 16] ldr x7, [x1, #(KSCHEDULER_HIGHEST_PRIORITY_THREAD)]
ldr x2, [x18] ldr x2, [x18]
cmp x7, x2 cmp x7, x2
b.ne 1f b.ne 1f
@ -158,7 +158,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
2: /* We're done saving this thread's context, so we need to unlock it. */ 2: /* We're done saving this thread's context, so we need to unlock it. */
/* We can just do an atomic write to the relevant KThreadContext member. */ /* We can just do an atomic write to the relevant KThreadContext member. */
add x2, x2, #0x280 add x2, x2, #(THREAD_CONTEXT_LOCKED)
stlrb wzr, [x2] stlrb wzr, [x2]
3: /* The current thread's context has been entirely taken care of. */ 3: /* The current thread's context has been entirely taken care of. */
@ -169,7 +169,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
mov x21, x7 /* highest priority thread */ mov x21, x7 /* highest priority thread */
/* Set our stack to the idle thread stack. */ /* Set our stack to the idle thread stack. */
ldr x3, [x20, #0x18] ldr x3, [x20, #(KSCHEDULER_IDLE_THREAD_STACK)]
mov sp, x3 mov sp, x3
b 5f b 5f
@ -183,7 +183,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
dmb ish dmb ish
/* Refresh the highest priority thread. */ /* Refresh the highest priority thread. */
ldr x21, [x20, 16] ldr x21, [x20, #(KSCHEDULER_HIGHEST_PRIORITY_THREAD)]
5: /* We're starting to try to do the context switch. */ 5: /* We're starting to try to do the context switch. */
/* Check if the highest priority thread if null. */ /* Check if the highest priority thread if null. */
@ -197,7 +197,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
mov x22, x0 mov x22, x0
/* Prepare to try to acquire the context lock. */ /* Prepare to try to acquire the context lock. */
add x1, x22, #0x280 add x1, x22, #(THREAD_CONTEXT_LOCKED)
mov w2, #1 mov w2, #1
6: /* We want to try to lock the highest priority thread's context. */ 6: /* We want to try to lock the highest priority thread's context. */
@ -214,7 +214,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
7: /* The highest priority thread's context is already locked. */ 7: /* The highest priority thread's context is already locked. */
/* Check if we need scheduling. If we don't, we can retry directly. */ /* Check if we need scheduling. If we don't, we can retry directly. */
ldarb w3, [x20] ldarb w3, [x20] // ldarb w3, [x20, #(KSCHEDULER_NEEDS_SCHEDULING)]
cbz w3, 6b cbz w3, 6b
/* If we do, another core is interfering, and we must start from the top. */ /* If we do, another core is interfering, and we must start from the top. */
@ -229,7 +229,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
bl _ZN3ams4kern10KScheduler12SwitchThreadEPNS0_7KThreadE bl _ZN3ams4kern10KScheduler12SwitchThreadEPNS0_7KThreadE
/* Check if we need scheduling. If we don't, then we can't complete the switch and should retry. */ /* Check if we need scheduling. If we don't, then we can't complete the switch and should retry. */
ldarb w1, [x20] ldarb w1, [x20] // ldarb w1, [x20, #(KSCHEDULER_NEEDS_SCHEDULING)]
cbnz w1, 10f cbnz w1, 10f
/* Restore the thread context. */ /* Restore the thread context. */
@ -241,7 +241,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
10: /* Our switch failed. */ 10: /* Our switch failed. */
/* We should unlock the thread context, and then retry. */ /* We should unlock the thread context, and then retry. */
add x1, x22, #0x280 add x1, x22, #(THREAD_CONTEXT_LOCKED)
stlrb wzr, [x1] stlrb wzr, [x1]
b 4b b 4b
@ -255,7 +255,7 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
12: /* We've switched to the idle thread, so we want to loop until we schedule a non-idle thread. */ 12: /* We've switched to the idle thread, so we want to loop until we schedule a non-idle thread. */
/* Check if we need scheduling. */ /* Check if we need scheduling. */
ldarb w3, [x20] ldarb w3, [x20] // ldarb w3, [x20, #(KSCHEDULER_NEEDS_SCHEDULING)]
cbnz w3, 13f cbnz w3, 13f
/* If we don't, wait for an interrupt and check again. */ /* If we don't, wait for an interrupt and check again. */
@ -268,14 +268,14 @@ _ZN3ams4kern10KScheduler12ScheduleImplEv:
13: /* We need scheduling again! */ 13: /* We need scheduling again! */
/* Check whether the interrupt task thread needs to be set runnable. */ /* Check whether the interrupt task thread needs to be set runnable. */
ldrb w3, [x20, #1] ldrb w3, [x20, #(KSCHEDULER_INTERRUPT_TASK_THREAD_RUNNABLE)]
cbz w3, 4b cbz w3, 4b
/* It does, so do so. We're using the idle thread stack so no register state preserve needed. */ /* It does, so do so. We're using the idle thread stack so no register state preserve needed. */
bl _ZN3ams4kern10KScheduler29InterruptTaskThreadToRunnableEv bl _ZN3ams4kern10KScheduler29InterruptTaskThreadToRunnableEv
/* Clear the interrupt task thread as runnable. */ /* Clear the interrupt task thread as runnable. */
strb wzr, [x20, #1] strb wzr, [x20, #(KSCHEDULER_INTERRUPT_TASK_THREAD_RUNNABLE)]
/* Retry the scheduling loop. */ /* Retry the scheduling loop. */
b 4b b 4b

View file

@ -27,39 +27,39 @@ _ZN3ams4kern4arch5arm6421UserModeThreadStarterEv:
/* | KExceptionContext (size 0x120) | KThread::StackParameters (size 0x30) | */ /* | KExceptionContext (size 0x120) | KThread::StackParameters (size 0x30) | */
/* Clear the disable count for this thread's stack parameters. */ /* Clear the disable count for this thread's stack parameters. */
strh wzr, [sp, #(0x120 + THREAD_STACK_PARAMETERS_DISABLE_COUNT)] strh wzr, [sp, #(EXCEPTION_CONTEXT_SIZE + THREAD_STACK_PARAMETERS_DISABLE_COUNT)]
/* Call ams::kern::arch::arm64::OnThreadStart() */ /* Call ams::kern::arch::arm64::OnThreadStart() */
bl _ZN3ams4kern4arch5arm6413OnThreadStartEv bl _ZN3ams4kern4arch5arm6413OnThreadStartEv
/* Restore thread state from the KExceptionContext on stack */ /* Restore thread state from the KExceptionContext on stack */
ldp x30, x19, [sp, #(8 * 30)] /* x30 = lr, x19 = sp */ ldp x30, x19, [sp, #(EXCEPTION_CONTEXT_X30_SP)] /* x30 = lr, x19 = sp */
ldp x20, x21, [sp, #(8 * 30 + 16)] /* x20 = pc, x21 = psr */ ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_PC_PSR)] /* x20 = pc, x21 = psr */
ldr x22, [sp, #(8 * 30 + 32)] /* x22 = tpidr */ ldr x22, [sp, #(EXCEPTION_CONTEXT_TPIDR)] /* x22 = tpidr */
msr sp_el0, x19 msr sp_el0, x19
msr elr_el1, x20 msr elr_el1, x20
msr spsr_el1, x21 msr spsr_el1, x21
msr tpidr_el0, x22 msr tpidr_el0, x22
ldp x0, x1, [sp, #(8 * 0)] ldp x0, x1, [sp, #(EXCEPTION_CONTEXT_X0_X1)]
ldp x2, x3, [sp, #(8 * 2)] ldp x2, x3, [sp, #(EXCEPTION_CONTEXT_X2_X3)]
ldp x4, x5, [sp, #(8 * 4)] ldp x4, x5, [sp, #(EXCEPTION_CONTEXT_X4_X5)]
ldp x6, x7, [sp, #(8 * 6)] ldp x6, x7, [sp, #(EXCEPTION_CONTEXT_X6_X7)]
ldp x8, x9, [sp, #(8 * 8)] ldp x8, x9, [sp, #(EXCEPTION_CONTEXT_X8_X9)]
ldp x10, x11, [sp, #(8 * 10)] ldp x10, x11, [sp, #(EXCEPTION_CONTEXT_X10_X11)]
ldp x12, x13, [sp, #(8 * 12)] ldp x12, x13, [sp, #(EXCEPTION_CONTEXT_X12_X13)]
ldp x14, x15, [sp, #(8 * 14)] ldp x14, x15, [sp, #(EXCEPTION_CONTEXT_X14_X15)]
ldp x16, x17, [sp, #(8 * 16)] ldp x16, x17, [sp, #(EXCEPTION_CONTEXT_X16_X17)]
ldp x18, x19, [sp, #(8 * 18)] ldp x18, x19, [sp, #(EXCEPTION_CONTEXT_X18_X19)]
ldp x20, x21, [sp, #(8 * 20)] ldp x20, x21, [sp, #(EXCEPTION_CONTEXT_X20_X21)]
ldp x22, x23, [sp, #(8 * 22)] ldp x22, x23, [sp, #(EXCEPTION_CONTEXT_X22_X23)]
ldp x24, x25, [sp, #(8 * 24)] ldp x24, x25, [sp, #(EXCEPTION_CONTEXT_X24_X25)]
ldp x26, x27, [sp, #(8 * 26)] ldp x26, x27, [sp, #(EXCEPTION_CONTEXT_X26_X27)]
ldp x28, x29, [sp, #(8 * 28)] ldp x28, x29, [sp, #(EXCEPTION_CONTEXT_X28_X29)]
/* Increment stack pointer above the KExceptionContext */ /* Increment stack pointer above the KExceptionContext */
add sp, sp, #0x120 add sp, sp, #(EXCEPTION_CONTEXT_SIZE)
/* Return to EL0 */ /* Return to EL0 */
eret eret
@ -95,28 +95,28 @@ _ZN3ams4kern4arch5arm6427SupervisorModeThreadStarterEv:
.type _ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters64ERKS3_, %function .type _ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters64ERKS3_, %function
_ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters64ERKS3_: _ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters64ERKS3_:
/* Load and restore FPCR and FPSR from the context. */ /* Load and restore FPCR and FPSR from the context. */
ldr x1, [x0, #0x70] ldr x1, [x0, #(THREAD_CONTEXT_FPCR)]
msr fpcr, x1 msr fpcr, x1
ldr x1, [x0, #0x78] ldr x1, [x0, #(THREAD_CONTEXT_FPSR)]
msr fpsr, x1 msr fpsr, x1
/* Restore the FPU registers. */ /* Restore the FPU registers. */
ldp q0, q1, [x0, #(16 * 0 + 0x80)] ldp q0, q1, [x0, #(16 * 0 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q2, q3, [x0, #(16 * 2 + 0x80)] ldp q2, q3, [x0, #(16 * 2 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q4, q5, [x0, #(16 * 4 + 0x80)] ldp q4, q5, [x0, #(16 * 4 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q6, q7, [x0, #(16 * 6 + 0x80)] ldp q6, q7, [x0, #(16 * 6 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q8, q9, [x0, #(16 * 8 + 0x80)] ldp q8, q9, [x0, #(16 * 8 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q10, q11, [x0, #(16 * 10 + 0x80)] ldp q10, q11, [x0, #(16 * 10 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q12, q13, [x0, #(16 * 12 + 0x80)] ldp q12, q13, [x0, #(16 * 12 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q14, q15, [x0, #(16 * 14 + 0x80)] ldp q14, q15, [x0, #(16 * 14 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q16, q17, [x0, #(16 * 16 + 0x80)] ldp q16, q17, [x0, #(16 * 16 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q18, q19, [x0, #(16 * 18 + 0x80)] ldp q18, q19, [x0, #(16 * 18 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q20, q21, [x0, #(16 * 20 + 0x80)] ldp q20, q21, [x0, #(16 * 20 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q22, q23, [x0, #(16 * 22 + 0x80)] ldp q22, q23, [x0, #(16 * 22 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q24, q25, [x0, #(16 * 24 + 0x80)] ldp q24, q25, [x0, #(16 * 24 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q26, q27, [x0, #(16 * 26 + 0x80)] ldp q26, q27, [x0, #(16 * 26 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q28, q29, [x0, #(16 * 28 + 0x80)] ldp q28, q29, [x0, #(16 * 28 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q30, q31, [x0, #(16 * 30 + 0x80)] ldp q30, q31, [x0, #(16 * 30 + THREAD_CONTEXT_FPU_REGISTERS)]
ret ret
@ -126,19 +126,19 @@ _ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters64ERKS3_:
.type _ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters32ERKS3_, %function .type _ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters32ERKS3_, %function
_ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters32ERKS3_: _ZN3ams4kern4arch5arm6414KThreadContext21RestoreFpuRegisters32ERKS3_:
/* Load and restore FPCR and FPSR from the context. */ /* Load and restore FPCR and FPSR from the context. */
ldr x1, [x0, #0x70] ldr x1, [x0, #(THREAD_CONTEXT_FPCR)]
msr fpcr, x1 msr fpcr, x1
ldr x1, [x0, #0x78] ldr x1, [x0, #(THREAD_CONTEXT_FPSR)]
msr fpsr, x1 msr fpsr, x1
/* Restore the FPU registers. */ /* Restore the FPU registers. */
ldp q0, q1, [x0, #(16 * 0 + 0x80)] ldp q0, q1, [x0, #(16 * 0 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q2, q3, [x0, #(16 * 2 + 0x80)] ldp q2, q3, [x0, #(16 * 2 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q4, q5, [x0, #(16 * 4 + 0x80)] ldp q4, q5, [x0, #(16 * 4 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q6, q7, [x0, #(16 * 6 + 0x80)] ldp q6, q7, [x0, #(16 * 6 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q8, q9, [x0, #(16 * 8 + 0x80)] ldp q8, q9, [x0, #(16 * 8 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q10, q11, [x0, #(16 * 10 + 0x80)] ldp q10, q11, [x0, #(16 * 10 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q12, q13, [x0, #(16 * 12 + 0x80)] ldp q12, q13, [x0, #(16 * 12 + THREAD_CONTEXT_FPU_REGISTERS)]
ldp q14, q15, [x0, #(16 * 14 + 0x80)] ldp q14, q15, [x0, #(16 * 14 + THREAD_CONTEXT_FPU_REGISTERS)]
ret ret