Mirror of https://github.com/Atmosphere-NX/Atmosphere

commit 34fb48b412 (parent 40980904f7)

    kern: mem access prep for svc streams, TODO_IMPLEMENT -> UNIMPLEMENTED

11 changed files with 396 additions and 25 deletions
@@ -36,6 +36,7 @@ namespace ams::kern::arch::arm64 {

 static bool ClearMemory(void *dst, size_t size);
 static bool ClearMemoryAligned32Bit(void *dst, size_t size);
+static bool ClearMemoryAligned64Bit(void *dst, size_t size);
 static bool ClearMemorySize32Bit(void *dst);

 static bool StoreDataCache(uintptr_t start, uintptr_t end);
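Note: the hunk above only adds the ClearMemoryAligned64Bit declaration. For orientation, here is a reconstructed sketch of the surrounding UserspaceAccess declaration block, assembled from the declarations and symbol comments that appear in this diff; the exact header layout (member order, access specifiers, other members) is an assumption.

/* Sketch only: approximated from this diff, not the verbatim kernel header. */
namespace ams::kern::arch::arm64 {

    class UserspaceAccess {
        public:
            /* Copy helpers implemented in assembly below; each returns true on success (x0 = 1). */
            static bool CopyMemoryFromUser(void *dst, const void *src, size_t size);
            static bool CopyMemoryFromUserAligned32Bit(void *dst, const void *src, size_t size);
            static bool CopyMemoryFromUserAligned64Bit(void *dst, const void *src, size_t size);
            static bool CopyMemoryFromUserSize32Bit(void *dst, const void *src);

            /* Clear helpers, including the 64-bit-aligned variant declared by this hunk. */
            static bool ClearMemory(void *dst, size_t size);
            static bool ClearMemoryAligned32Bit(void *dst, size_t size);
            static bool ClearMemoryAligned64Bit(void *dst, size_t size);
            static bool ClearMemorySize32Bit(void *dst);

            /* Cache maintenance over a [start, end) virtual address range. */
            static bool StoreDataCache(uintptr_t start, uintptr_t end);
    };

}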
@@ -66,7 +66,7 @@ namespace ams::kern {
 #endif

 #define MESOSPHERE_TODO(arg) ({ constexpr const char *__mesosphere_todo = arg; static_cast<void>(__mesosphere_todo); MESOSPHERE_PANIC("TODO (%s): %s\n", __PRETTY_FUNCTION__, __mesosphere_todo); })
-#define MESOSPHERE_TODO_IMPLEMENT() MESOSPHERE_TODO("Implement")
+#define MESOSPHERE_UNIMPLEMENTED() MESOSPHERE_PANIC("%s: Unimplemented\n", __PRETTY_FUNCTION__)

 #define MESOSPHERE_ABORT() MESOSPHERE_PANIC("Abort()\n");
 #define MESOSPHERE_INIT_ABORT() do { /* ... */ } while (true)
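Note: the macro change above replaces the "TODO (<function>): Implement" panic with a terser "<function>: Unimplemented" panic. A hypothetical stub (ExampleStub is not from this commit) showing the intended usage:

/* Hypothetical illustration; only the macro itself comes from this diff. */
namespace ams::kern {

    void ExampleStub() {
        /* Previously: MESOSPHERE_TODO_IMPLEMENT();          */
        /*   -> panics with "TODO (<function>): Implement"   */
        /* Now this panics with "<function>: Unimplemented". */
        MESOSPHERE_UNIMPLEMENTED();
    }

}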
@@ -212,7 +212,7 @@ namespace ams::kern::arch::arm64 {
 }

 Result KPageTable::Finalize() {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 Result KPageTable::Operate(PageLinkedList *page_list, KProcessAddress virt_addr, size_t num_pages, KPhysicalAddress phys_addr, bool is_pa_valid, const KPageProperties properties, OperationType operation, bool reuse_ll) {
@@ -40,6 +40,6 @@ namespace ams::kern::arch::arm64 {
 }

 void KSupervisorPageTable::Finalize(s32 core_id) {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }
 }
@@ -18,15 +18,231 @@
 .section .text._ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv, "ax", %progbits
 .global _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv
 .type _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv, %function
+.balign 0x10
 _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv:
     /* NOTE: This is not a real function, and only exists as a label for safety. */

 /* ================ All Userspace Access Functions after this line. ================ */

+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryFromUser(void *dst, const void *src, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess18CopyMemoryFromUserEPvPKvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess18CopyMemoryFromUserEPvPKvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess18CopyMemoryFromUserEPvPKvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess18CopyMemoryFromUserEPvPKvm:
+    /* Check if there's anything to copy. */
+    cmp x2, #0
+    b.eq 2f
+
+    /* Keep track of the last address. */
+    add x3, x1, x2
+
+1: /* We're copying memory byte-by-byte. */
+    ldtrb w2, [x1]
+    strb w2, [x0], #1
+    add x1, x1, #1
+    cmp x1, x3
+    b.ne 1b
+
+2: /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryFromUserAligned32Bit(void *dst, const void *src, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned32BitEPvPKvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned32BitEPvPKvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned32BitEPvPKvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned32BitEPvPKvm:
+    /* Check if there are 0x40 bytes to copy */
+    cmp x2, #0x3F
+    b.ls 1f
+    ldtr x4, [x1, #0x00]
+    ldtr x5, [x1, #0x08]
+    ldtr x6, [x1, #0x10]
+    ldtr x7, [x1, #0x18]
+    ldtr x8, [x1, #0x20]
+    ldtr x9, [x1, #0x28]
+    ldtr x10, [x1, #0x30]
+    ldtr x11, [x1, #0x38]
+    stp x4, x5, [x0, #0x00]
+    stp x6, x7, [x0, #0x10]
+    stp x8, x9, [x0, #0x20]
+    stp x10, x11, [x0, #0x30]
+    add x0, x0, #0x40
+    add x1, x1, #0x40
+    sub x2, x2, #0x40
+    b _ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned32BitEPvPKvm
+
+1: /* We have less than 0x40 bytes to copy. */
+    cmp x2, #0
+    b.eq 2f
+    ldtr w4, [x1]
+    str w4, [x0], #4
+    add x1, x1, #4
+    sub x2, x2, #4
+    b 1b
+
+2: /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryFromUserAligned64Bit(void *dst, const void *src, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned64BitEPvPKvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned64BitEPvPKvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned64BitEPvPKvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned64BitEPvPKvm:
+    /* Check if there are 0x40 bytes to copy */
+    cmp x2, #0x3F
+    b.ls 1f
+    ldtr x4, [x1, #0x00]
+    ldtr x5, [x1, #0x08]
+    ldtr x6, [x1, #0x10]
+    ldtr x7, [x1, #0x18]
+    ldtr x8, [x1, #0x20]
+    ldtr x9, [x1, #0x28]
+    ldtr x10, [x1, #0x30]
+    ldtr x11, [x1, #0x38]
+    stp x4, x5, [x0, #0x00]
+    stp x6, x7, [x0, #0x10]
+    stp x8, x9, [x0, #0x20]
+    stp x10, x11, [x0, #0x30]
+    add x0, x0, #0x40
+    add x1, x1, #0x40
+    sub x2, x2, #0x40
+    b _ZN3ams4kern4arch5arm6415UserspaceAccess30CopyMemoryFromUserAligned64BitEPvPKvm
+
+1: /* We have less than 0x40 bytes to copy. */
+    cmp x2, #0
+    b.eq 2f
+    ldtr x4, [x1]
+    str x4, [x0], #8
+    add x1, x1, #8
+    sub x2, x2, #8
+    b 1b
+
+2: /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryFromUserSize32Bit(void *dst, const void *src) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess27CopyMemoryFromUserSize32BitEPvPKv, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess27CopyMemoryFromUserSize32BitEPvPKv
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess27CopyMemoryFromUserSize32BitEPvPKv, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess27CopyMemoryFromUserSize32BitEPvPKv:
+    /* Just load and store a u32. */
+    ldtr w2, [x1]
+    str w2, [x0]
+
+    /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::CopyStringFromUser(void *dst, const void *src, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess18CopyStringFromUserEPvPKvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess18CopyStringFromUserEPvPKvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess18CopyStringFromUserEPvPKvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess18CopyStringFromUserEPvPKvm:
+    /* Check if there's anything to copy. */
+    cmp x2, #0
+    b.eq 3f
+
+    /* Keep track of the start address and last address. */
+    mov x4, x1
+    add x3, x1, x2
+
+1: /* We're copying memory byte-by-byte. */
+    ldtrb w2, [x1]
+    strb w2, [x0], #1
+    add x1, x1, #1
+
+    /* If we read a null terminator, we're done. */
+    cmp w2, #0
+    b.eq 2f
+
+    /* Check if we're done. */
+    cmp x1, x3
+    b.ne 1b
+
+2: /* We're done, and we copied some amount of data from the string. */
+    sub x0, x1, x4
+    ret
+
+3: /* We're done, and there was no string data. */
+    mov x0, #0
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryToUser(void *dst, const void *src, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess16CopyMemoryToUserEPvPKvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess16CopyMemoryToUserEPvPKvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess16CopyMemoryToUserEPvPKvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess16CopyMemoryToUserEPvPKvm:
+    /* Check if there's anything to copy. */
+    cmp x2, #0
+    b.eq 2f
+
+    /* Keep track of the last address. */
+    add x3, x1, x2
+
+1: /* We're copying memory byte-by-byte. */
+    ldrb w2, [x1], #1
+    sttrb w2, [x0]
+    add x0, x0, #1
+    cmp x1, x3
+    b.ne 1b
+
+2: /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryToUserAligned32Bit(void *dst, const void *src, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned32BitEPvPKvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned32BitEPvPKvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned32BitEPvPKvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned32BitEPvPKvm:
+    /* Check if there are 0x40 bytes to copy */
+    cmp x2, #0x3F
+    b.ls 1f
+    ldp x4, x5, [x1, #0x00]
+    ldp x6, x7, [x1, #0x10]
+    ldp x8, x9, [x1, #0x20]
+    ldp x10, x11, [x1, #0x30]
+    sttr x4, [x0, #0x00]
+    sttr x5, [x0, #0x08]
+    sttr x6, [x0, #0x10]
+    sttr x7, [x0, #0x18]
+    sttr x8, [x0, #0x20]
+    sttr x9, [x0, #0x28]
+    sttr x10, [x0, #0x30]
+    sttr x11, [x0, #0x38]
+    add x0, x0, #0x40
+    add x1, x1, #0x40
+    sub x2, x2, #0x40
+    b _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned32BitEPvPKvm
+
+1: /* We have less than 0x40 bytes to copy. */
+    cmp x2, #0
+    b.eq 2f
+    ldr w4, [x1], #4
+    sttr w4, [x0]
+    add x0, x0, #4
+    sub x2, x2, #4
+    b 1b
+
+2: /* We're done. */
+    mov x0, #1
+    ret
+
 /* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryToUserAligned64Bit(void *dst, const void *src, size_t size) */
 .section .text._ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm, "ax", %progbits
 .global _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm
 .type _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm, %function
+.balign 0x10
 _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm:
     /* Check if there are 0x40 bytes to copy */
     cmp x2, #0x3F
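Note: the routines added above use unprivileged loads (ldtrb/ldtr), presumably so that a fault on an unmapped or protected user page is taken as a recoverable user-access fault rather than a kernel bug. The byte-wise and aligned copy variants report success by returning 1 in x0, while CopyStringFromUser returns the number of bytes copied (including a null terminator when one is found within the given size), or 0 when the size is zero. A hypothetical C++ caller, not part of this commit, illustrating those return conventions (the wrapper's exact return type is an assumption):

/* Hypothetical caller; everything except the UserspaceAccess member name is an assumption. */
bool ReadUserString(char *dst, const char *user_src, size_t dst_size) {
    using ams::kern::arch::arm64::UserspaceAccess;

    /* Returns bytes copied (terminator included when found), or 0 if dst_size == 0. */
    const size_t copied = UserspaceAccess::CopyStringFromUser(dst, user_src, dst_size);
    if (copied == 0) {
        return false;
    }

    /* Force termination in case the user string filled the whole buffer without a '\0'. */
    dst[copied - 1] = '\0';
    return true;
}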
@@ -49,22 +265,174 @@ _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm:
     b _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm

 1: /* We have less than 0x40 bytes to copy. */
-    cmp x2, #0x0
+    cmp x2, #0
     b.eq 2f
-    ldr x4, [x1], #0x8
+    ldr x4, [x1], #8
     sttr x4, [x0]
-    add x0, x0, #0x8
-    sub x2, x2, #0x8
+    add x0, x0, #8
+    sub x2, x2, #8
     b 1b

 2: /* We're done. */
     mov x0, #1
     ret

+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryToUserSize32Bit(void *dst, const void *src) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess25CopyMemoryToUserSize32BitEPvPKv, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess25CopyMemoryToUserSize32BitEPvPKv
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess25CopyMemoryToUserSize32BitEPvPKv, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess25CopyMemoryToUserSize32BitEPvPKv:
+    /* Just load and store a u32. */
+    ldr w2, [x1]
+    sttr w2, [x0]
+
+    /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::CopyStringToUser(void *dst, const void *src, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess16CopyStringToUserEPvPKvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess16CopyStringToUserEPvPKvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess16CopyStringToUserEPvPKvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess16CopyStringToUserEPvPKvm:
+    /* Check if there's anything to copy. */
+    cmp x2, #0
+    b.eq 3f
+
+    /* Keep track of the start address and last address. */
+    mov x4, x1
+    add x3, x1, x2
+
+1: /* We're copying memory byte-by-byte. */
+    ldrb w2, [x1], #1
+    sttrb w2, [x0]
+    add x0, x0, #1
+
+    /* If we read a null terminator, we're done. */
+    cmp w2, #0
+    b.eq 2f
+
+    /* Check if we're done. */
+    cmp x1, x3
+    b.ne 1b
+
+2: /* We're done, and we copied some amount of data from the string. */
+    sub x0, x1, x4
+    ret
+
+3: /* We're done, and there was no string data. */
+    mov x0, #0
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::ClearMemory(void *dst, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess11ClearMemoryEPvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess11ClearMemoryEPvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess11ClearMemoryEPvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess11ClearMemoryEPvm:
+    /* Check if there's anything to clear. */
+    cmp x1, #0
+    b.eq 2f
+
+    /* Keep track of the last address. */
+    add x2, x0, x1
+
+1: /* We're copying memory byte-by-byte. */
+    sttrb wzr, [x0]
+    add x0, x0, #1
+    cmp x0, x2
+    b.ne 1b
+
+2: /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::ClearMemoryAligned32Bit(void *dst, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned32BitEPvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned32BitEPvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned32BitEPvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned32BitEPvm:
+    /* Check if there are 0x40 bytes to clear. */
+    cmp x1, #0x3F
+    b.ls 2f
+    sttr xzr, [x0, #0x00]
+    sttr xzr, [x0, #0x08]
+    sttr xzr, [x0, #0x10]
+    sttr xzr, [x0, #0x18]
+    sttr xzr, [x0, #0x20]
+    sttr xzr, [x0, #0x28]
+    sttr xzr, [x0, #0x30]
+    sttr xzr, [x0, #0x38]
+    add x0, x0, #0x40
+    sub x1, x1, #0x40
+    b _ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned32BitEPvm
+
+1: /* We have less than 0x40 bytes to clear. */
+    cmp x1, #0
+    b.eq 2f
+    sttr wzr, [x0]
+    add x0, x0, #4
+    sub x1, x1, #4
+    b 1b
+
+2: /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::ClearMemoryAligned64Bit(void *dst, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned64BitEPvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned64BitEPvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned64BitEPvm, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned64BitEPvm:
+    /* Check if there are 0x40 bytes to clear. */
+    cmp x1, #0x3F
+    b.ls 2f
+    sttr xzr, [x0, #0x00]
+    sttr xzr, [x0, #0x08]
+    sttr xzr, [x0, #0x10]
+    sttr xzr, [x0, #0x18]
+    sttr xzr, [x0, #0x20]
+    sttr xzr, [x0, #0x28]
+    sttr xzr, [x0, #0x30]
+    sttr xzr, [x0, #0x38]
+    add x0, x0, #0x40
+    sub x1, x1, #0x40
+    b _ZN3ams4kern4arch5arm6415UserspaceAccess23ClearMemoryAligned64BitEPvm
+
+1: /* We have less than 0x40 bytes to clear. */
+    cmp x1, #0
+    b.eq 2f
+    sttr xzr, [x0]
+    add x0, x0, #8
+    sub x1, x1, #8
+    b 1b
+
+2: /* We're done. */
+    mov x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::ClearMemorySize32Bit(void *dst) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess20ClearMemorySize32BitEPv, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess20ClearMemorySize32BitEPv
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess20ClearMemorySize32BitEPv, %function
+.balign 0x10
+_ZN3ams4kern4arch5arm6415UserspaceAccess20ClearMemorySize32BitEPv:
+    /* Just store a zero. */
+    sttr wzr, [x0]
+
+    /* We're done. */
+    mov x0, #1
+    ret
+
 /* ams::kern::arch::arm64::UserspaceAccess::StoreDataCache(uintptr_t start, uintptr_t end) */
 .section .text._ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm, "ax", %progbits
 .global _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm
 .type _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm, %function
+.balign 0x10
 _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm:
     /* Check if we have any work to do. */
     cmp x1, x0
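Note: the clear routines above zero user memory with unprivileged stores (sttrb/sttr of wzr/xzr), again presumably so that a fault on a user page can be intercepted, and they return 1 in x0 once the whole range has been written. A plain C++ reference model of the byte-wise ClearMemory loop, added here only as an illustration and not taken from this commit (it reproduces the values computed, not the unprivileged-store behaviour):

#include <cstddef>
#include <cstdint>

/* Reference model (assumption, not from this commit) of UserspaceAccess::ClearMemory's byte loop. */
bool ClearMemoryModel(void *dst, std::size_t size) {
    auto *cur        = static_cast<std::uint8_t *>(dst);
    const auto *last = cur + size;   /* the assembly keeps the end address in x2 */

    while (cur != last) {
        *cur++ = 0;                  /* sttrb wzr, [x0]; add x0, x0, #1 */
    }
    return true;                     /* mov x0, #1 */
}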
@@ -84,6 +452,7 @@ _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm:
 .section .text._ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm, "ax", %progbits
 .global _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm
 .type _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm, %function
+.balign 0x10
 _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm:
     /* Check if we have any work to do. */
     cmp x1, x0
@@ -103,6 +472,7 @@ _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm:
 .section .text._ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm, "ax", %progbits
 .global _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm
 .type _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm, %function
+.balign 0x10
 _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm:
     /* Check if we have any work to do. */
     cmp x1, x0
@@ -122,6 +492,7 @@ _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm:
 .section .text._ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm, "ax", %progbits
 .global _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm
 .type _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm, %function
+.balign 0x10
 _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm:
     /* Check if we have any work to do. */
     cmp x1, x0
@@ -143,5 +514,6 @@ _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm:
 .section .text._ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv, "ax", %progbits
 .global _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv
 .type _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv, %function
+.balign 0x10
 _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv:
     /* NOTE: This is not a real function, and only exists as a label for safety. */
@@ -291,7 +291,7 @@ namespace ams::kern::board::nintendo::nx {
 }

 void KSystemControl::ReadWriteRegister(u32 *out, ams::svc::PhysicalAddress address, u32 mask, u32 value) {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 /* Randomness. */
@@ -686,7 +686,7 @@ namespace ams::kern {
 }

 Result KPageTableBase::SetMemoryPermission(KProcessAddress addr, size_t size, ams::svc::MemoryPermission svc_perm) {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 Result KPageTableBase::SetProcessMemoryPermission(KProcessAddress addr, size_t size, ams::svc::MemoryPermission svc_perm) {
@@ -750,7 +750,7 @@ namespace ams::kern {
 }

 Result KPageTableBase::SetHeapSize(KProcessAddress *out, size_t size) {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 Result KPageTableBase::SetMaxHeapSize(size_t size) {
@@ -867,11 +867,11 @@ namespace ams::kern {
 }

 Result KPageTableBase::MapStatic(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm) {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 Result KPageTableBase::MapRegion(KMemoryRegionType region_type, KMemoryPermission perm) {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 Result KPageTableBase::MapPages(KProcessAddress *out_addr, size_t num_pages, size_t alignment, KPhysicalAddress phys_addr, bool is_pa_valid, KProcessAddress region_start, size_t region_num_pages, KMemoryState state, KMemoryPermission perm) {
@@ -915,7 +915,7 @@ namespace ams::kern {
 }

 Result KPageTableBase::UnmapPages(KProcessAddress address, size_t num_pages, KMemoryState state) {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 Result KPageTableBase::MapPageGroup(KProcessAddress *out_addr, const KPageGroup &pg, KProcessAddress region_start, size_t region_num_pages, KMemoryState state, KMemoryPermission perm) {
@@ -26,7 +26,7 @@ namespace ams::kern {
 }

 void KProcess::Finalize() {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 Result KProcess::Initialize(const ams::svc::CreateProcessParameter &params) {
@@ -153,7 +153,7 @@ namespace ams::kern {
 }

 void KProcess::DoWorkerTask() {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 Result KProcess::CreateThreadLocalRegion(KProcessAddress *out) {
@@ -370,7 +370,7 @@ namespace ams::kern {
 }

 void KProcess::SetPreemptionState() {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 }
@@ -20,7 +20,7 @@ namespace ams::kern {
 Result KSynchronization::Wait(s32 *out_index, KSynchronizationObject **objects, const s32 num_objects, s64 timeout) {
     MESOSPHERE_ASSERT_THIS();

-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 void KSynchronization::OnAvailable(KSynchronizationObject *object) {
@@ -28,11 +28,9 @@ namespace ams::kern {
     const uintptr_t stack_bottom = stack_top - PageSize;

     KPhysicalAddress stack_paddr = Null<KPhysicalAddress>;
-    MESOSPHERE_TODO("MESOSPHERE_ABORT_UNLESS(Kernel::GetSupervisorPageTable().GetPhysicalAddress(&stack_paddr, stack_bottom));");
-    (void)stack_bottom;
+    MESOSPHERE_ABORT_UNLESS(Kernel::GetKernelPageTable().GetPhysicalAddress(&stack_paddr, stack_bottom));

-    MESOSPHERE_TODO("MESOSPHERE_R_ABORT_UNLESS(Kernel::GetSupervisorPageTable().Unmap(...);");
-    (void)stack_paddr;
+    MESOSPHERE_R_ABORT_UNLESS(Kernel::GetKernelPageTable().UnmapPages(stack_bottom, 1, KMemoryState_Kernel));

     /* Free the stack page. */
     KPageBuffer::Free(KPageBuffer::FromPhysicalAddress(stack_paddr));
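Note: the KThread hunk above replaces two placeholder TODOs with the real kernel-stack teardown: resolve the stack page's physical address, unmap that single page from the kernel page table, and return the backing page to the KPageBuffer allocator. Condensed into one sketch (the enclosing function name and parameter are assumptions; the calls themselves are the ones visible in the diff):

/* Sketch of the sequence after this change; the surrounding function is assumed. */
void CleanupKernelStackExample(uintptr_t stack_top) {
    const uintptr_t stack_bottom = stack_top - PageSize;

    /* Look up the physical page backing the stack. */
    KPhysicalAddress stack_paddr = Null<KPhysicalAddress>;
    MESOSPHERE_ABORT_UNLESS(Kernel::GetKernelPageTable().GetPhysicalAddress(&stack_paddr, stack_bottom));

    /* Unmap the single stack page, then free it. */
    MESOSPHERE_R_ABORT_UNLESS(Kernel::GetKernelPageTable().UnmapPages(stack_bottom, 1, KMemoryState_Kernel));
    KPageBuffer::Free(KPageBuffer::FromPhysicalAddress(stack_paddr));
}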
@@ -253,7 +251,7 @@ namespace ams::kern {
 }

 void KThread::Finalize() {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 bool KThread::IsSignaled() const {
@@ -281,7 +279,7 @@ namespace ams::kern {
 }

 void KThread::DoWorkerTask() {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 void KThread::DisableCoreMigration() {
@@ -588,7 +586,7 @@ namespace ams::kern {
 void KThread::Exit() {
     MESOSPHERE_ASSERT_THIS();

-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();

     MESOSPHERE_PANIC("KThread::Exit() would return");
 }
@@ -18,7 +18,7 @@
 namespace ams::kern {

 void KWaitObject::OnTimer() {
-    MESOSPHERE_TODO_IMPLEMENT();
+    MESOSPHERE_UNIMPLEMENTED();
 }

 }