kern: fix bugs caused by UB + transition to -Os

Michael Scire 2020-12-01 18:41:44 -08:00 committed by SciresM
parent 1b164613a6
commit 3c8e7de915
4 changed files with 49 additions and 45 deletions

View file

@@ -25,7 +25,7 @@ namespace ams::kern::arch::arm64 {
         public:
             constexpr KNotAlignedSpinLock() : packed_tickets(0) { /* ... */ }

-            void Lock() {
+            ALWAYS_INLINE void Lock() {
                 u32 tmp0, tmp1, tmp2;

                 __asm__ __volatile__(
@@ -52,7 +52,7 @@ namespace ams::kern::arch::arm64 {
                 );
             }

-            void Unlock() {
+            ALWAYS_INLINE void Unlock() {
                 const u32 value = this->packed_tickets + 1;
                 __asm__ __volatile__(
                     " stlrh %w[value], %[packed_tickets]\n"
@@ -71,7 +71,7 @@ namespace ams::kern::arch::arm64 {
         public:
             constexpr KAlignedSpinLock() : current_ticket(0), next_ticket(0) { /* ... */ }

-            void Lock() {
+            ALWAYS_INLINE void Lock() {
                 u32 tmp0, tmp1, got_lock;

                 __asm__ __volatile__(
@@ -94,7 +94,7 @@ namespace ams::kern::arch::arm64 {
                 );
             }

-            void Unlock() {
+            ALWAYS_INLINE void Unlock() {
                 const u32 value = this->current_ticket + 1;
                 __asm__ __volatile__(
                     " stlrh %w[value], %[current_ticket]\n"

View file

@@ -42,7 +42,7 @@ namespace ams::kern {
             u32 prev_intr_state;
         public:
             ALWAYS_INLINE KScopedInterruptDisable() : prev_intr_state(KInterruptManager::DisableInterrupts()) { /* ... */ }
-            ~KScopedInterruptDisable() { KInterruptManager::RestoreInterrupts(prev_intr_state); }
+            ALWAYS_INLINE ~KScopedInterruptDisable() { KInterruptManager::RestoreInterrupts(prev_intr_state); }
     };

     class KScopedInterruptEnable {
@@ -52,7 +52,7 @@ namespace ams::kern {
             u32 prev_intr_state;
         public:
             ALWAYS_INLINE KScopedInterruptEnable() : prev_intr_state(KInterruptManager::EnableInterrupts()) { /* ... */ }
-            ~KScopedInterruptEnable() { KInterruptManager::RestoreInterrupts(prev_intr_state); }
+            ALWAYS_INLINE ~KScopedInterruptEnable() { KInterruptManager::RestoreInterrupts(prev_intr_state); }
     };

 }
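The interrupt guards receive the same treatment on their destructors. These are RAII types: the constructor saves the previous interrupt state while disabling (or enabling) interrupts, and the destructor restores that state when the scope ends. A self-contained sketch of the pattern, with hypothetical DisableInterruptsImpl/RestoreInterruptsImpl stubs standing in for KInterruptManager (which is not part of this diff):

    #include <cstdint>

    #define ALWAYS_INLINE inline __attribute__((always_inline))

    /* Hypothetical stubs; the real backend is KInterruptManager. */
    static ALWAYS_INLINE std::uint32_t DisableInterruptsImpl() { return 0; /* stub: would mask interrupts and return the old state */ }
    static ALWAYS_INLINE void RestoreInterruptsImpl(std::uint32_t state) { static_cast<void>(state); /* stub: would restore the saved state */ }

    class ScopedInterruptDisable {
        private:
            std::uint32_t prev_state;
        public:
            /* Save the previous state and disable interrupts on entry to the scope. */
            ALWAYS_INLINE ScopedInterruptDisable() : prev_state(DisableInterruptsImpl()) { /* ... */ }
            /* Restore the saved state when the guard goes out of scope. */
            ALWAYS_INLINE ~ScopedInterruptDisable() { RestoreInterruptsImpl(prev_state); }
    };

Marking the destructors ALWAYS_INLINE (the constructors already were) presumably keeps the restore path from turning into an out-of-line call in interrupt-critical code once the build moves to -Os.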

View file

@@ -58,28 +58,30 @@ namespace ams::kern::board::nintendo::nx::smc {
                 /* Disable interrupts while making the call. */
                 KScopedInterruptDisable intr_disable;

-                /* Backup the current thread pointer. */
-                const uintptr_t current_thread_pointer_value = cpu::GetCurrentThreadPointerValue();
+                {
+                    /* Backup the current thread pointer. */
+                    const uintptr_t current_thread_pointer_value = cpu::GetCurrentThreadPointerValue();

-                __asm__ __volatile__("smc #1"
-                                     : "+r"(x0), "+r"(x1), "+r"(x2), "+r"(x3), "+r"(x4), "+r"(x5), "+r"(x6), "+r"(x7)
-                                     :
-                                     : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "cc", "memory"
-                                     );
+                    __asm__ __volatile__("smc #1"
+                                         : "+r"(x0), "+r"(x1), "+r"(x2), "+r"(x3), "+r"(x4), "+r"(x5), "+r"(x6), "+r"(x7)
+                                         :
+                                         : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "cc", "memory"
+                                         );

-                /* Restore the current thread pointer into X18. */
-                cpu::SetCurrentThreadPointerValue(current_thread_pointer_value);
+                    /* Restore the current thread pointer into X18. */
+                    cpu::SetCurrentThreadPointerValue(current_thread_pointer_value);

-                /* Store arguments to output. */
-                args.x[0] = x0;
-                args.x[1] = x1;
-                args.x[2] = x2;
-                args.x[3] = x3;
-                args.x[4] = x4;
-                args.x[5] = x5;
-                args.x[6] = x6;
-                args.x[7] = x7;
+                }
             }
+
+            /* Store arguments to output. */
+            args.x[0] = x0;
+            args.x[1] = x1;
+            args.x[2] = x2;
+            args.x[3] = x3;
+            args.x[4] = x4;
+            args.x[5] = x5;
+            args.x[6] = x6;
+            args.x[7] = x7;
         }

         void CallUserSecureMonitorFunction(ams::svc::lp64::SecureMonitorArguments *args) {
@@ -98,28 +100,30 @@ namespace ams::kern::board::nintendo::nx::smc {
                 /* Disable interrupts while making the call. */
                 KScopedInterruptDisable intr_disable;

-                /* Backup the current thread pointer. */
-                const uintptr_t current_thread_pointer_value = cpu::GetCurrentThreadPointerValue();
+                {
+                    /* Backup the current thread pointer. */
+                    const uintptr_t current_thread_pointer_value = cpu::GetCurrentThreadPointerValue();

-                __asm__ __volatile__("smc #0"
-                                     : "+r"(x0), "+r"(x1), "+r"(x2), "+r"(x3), "+r"(x4), "+r"(x5), "+r"(x6), "+r"(x7)
-                                     :
-                                     : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "cc", "memory"
-                                     );
+                    __asm__ __volatile__("smc #0"
+                                         : "+r"(x0), "+r"(x1), "+r"(x2), "+r"(x3), "+r"(x4), "+r"(x5), "+r"(x6), "+r"(x7)
+                                         :
+                                         : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "cc", "memory"
+                                         );

-                /* Restore the current thread pointer into X18. */
-                cpu::SetCurrentThreadPointerValue(current_thread_pointer_value);
+                    /* Restore the current thread pointer into X18. */
+                    cpu::SetCurrentThreadPointerValue(current_thread_pointer_value);

-                /* Store arguments to output. */
-                args->r[0] = x0;
-                args->r[1] = x1;
-                args->r[2] = x2;
-                args->r[3] = x3;
-                args->r[4] = x4;
-                args->r[5] = x5;
-                args->r[6] = x6;
-                args->r[7] = x7;
+                }
             }
+
+            /* Store arguments to output. */
+            args->r[0] = x0;
+            args->r[1] = x1;
+            args->r[2] = x2;
+            args->r[3] = x3;
+            args->r[4] = x4;
+            args->r[5] = x5;
+            args->r[6] = x6;
+            args->r[7] = x7;
         }

         void CallPrivilegedSecureMonitorFunctionForInit(SecureMonitorArguments &args) {
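Both wrappers now share the same shape: interrupts are disabled for the duration of the call, the thread-pointer register is backed up and restored inside its own scope (the diff's comments note that X18 holds the current thread pointer and the SMC clobbers it), and the output registers are copied back into the argument structure only after those scopes have closed. A structural sketch of that shape, not the real kernel code — GetThreadPointer/SetThreadPointer are assumed helpers approximating cpu::GetCurrentThreadPointerValue/cpu::SetCurrentThreadPointerValue, only two registers are shown, and the smc instruction itself is only meaningful at the appropriate exception level:

    #include <cstdint>

    struct Args { std::uint64_t x[8]; };

    /* Assumed helpers: read/write the x18-based thread pointer directly. */
    static inline std::uintptr_t GetThreadPointer() {
        std::uintptr_t v;
        __asm__ __volatile__("mov %0, x18" : "=r"(v));
        return v;
    }

    static inline void SetThreadPointer(std::uintptr_t v) {
        __asm__ __volatile__("mov x18, %0" : : "r"(v) : "x18");
    }

    void CallSecureMonitorSketch(Args &args) {
        /* Pin the first two arguments to the registers the SMC ABI expects. */
        register std::uint64_t x0 __asm__("x0") = args.x[0];
        register std::uint64_t x1 __asm__("x1") = args.x[1];

        {
            /* The RAII interrupt-disable guard would live at this scope. */

            {
                /* Back up the thread pointer; the call clobbers x18. */
                const std::uintptr_t tp = GetThreadPointer();

                __asm__ __volatile__("smc #1"
                                     : "+r"(x0), "+r"(x1)
                                     :
                                     : "x18", "cc", "memory");

                /* Put it back before leaving the scope. */
                SetThreadPointer(tp);
            }
        }

        /* Only now, with x18 restored, write the results back through the reference. */
        args.x[0] = x0;
        args.x[1] = x1;
    }

The intent, as far as the diff shows, is that the results are written back only after the thread pointer has been restored and the interrupt-disable scope has closed, rather than while x18 may still hold a clobbered value.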

View file

@@ -63,7 +63,7 @@ namespace ams::kern {
                     }
                     [[fallthrough]];
                 case ThreadType_HighPriority:
-                    {
+                    if (type != ThreadType_Main) {
                         MESOSPHERE_ASSERT(phys_core == GetCurrentCoreId());
                     }
                     [[fallthrough]];
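The final hunk guards an assertion that sits inside a fallthrough chain: the case above deliberately falls through into ThreadType_HighPriority, and the physical-core assertion is now skipped when the thread being initialized is a main thread. A minimal sketch of the idiom, with hypothetical names and a plain assert standing in for MESOSPHERE_ASSERT:

    #include <cassert>

    enum ThreadType { ThreadType_Main, ThreadType_HighPriority, ThreadType_Kernel };

    void InitializeSketch(ThreadType type, int phys_core, int current_core) {
        switch (type) {
            case ThreadType_Main:
                /* Main-thread-only setup would go here. */
                [[fallthrough]];
            case ThreadType_HighPriority:
                /* Skip the check when we fell through from the Main case above. */
                if (type != ThreadType_Main) {
                    assert(phys_core == current_core);
                }
                [[fallthrough]];
            case ThreadType_Kernel:
                /* Setup shared by all three thread types would go here. */
                break;
        }
    }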