fusee: Move nxboot hand-off to IRAM.

fusee/exosphere: Minor cleanup.
hexkyz 2018-09-15 21:08:58 +01:00
parent d61ec20679
commit 39d812f434
19 changed files with 370 additions and 290 deletions
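Most of the cleanup half of this commit is mechanical: the global SECURITY_ENGINE macro (backed by get_security_engine()) goes away in favour of an se_get_regs() accessor returning a volatile tegra_se_t *, and functions that touch several registers now cache that pointer in a local. A minimal sketch of the before/after shape, with a dummy register block standing in for the real MMIO mapping and only two illustrative fields:

    #include <stdint.h>

    /* Illustrative two-field subset; the real tegra_se_t in se.h covers the
     * full 0x2000-byte SE register space. */
    typedef struct {
        uint32_t CONFIG_REG;
        uint32_t BLOCK_COUNT_REG;
    } tegra_se_t;

    /* Stand-in for the MMIO mapping (exosphere uses
     * MMIO_GET_DEVICE_ADDRESS(MMIO_DEVID_SE), fusee uses SE_BASE). */
    static tegra_se_t g_se_regs;

    static inline volatile tegra_se_t *se_get_regs(void) {
        return (volatile tegra_se_t *)&g_se_regs;
    }

    static void example_op(void) {
        /* New style: fetch the pointer once, reuse it for every access. */
        volatile tegra_se_t *se = se_get_regs();
        se->CONFIG_REG = 0;
        se->BLOCK_COUNT_REG = 0;
    }

Functions that only touch a single register simply call se_get_regs() at the point of use, which is the other pattern visible in the hunks below.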


@ -18,7 +18,6 @@
#include "utils.h" #include "utils.h"
#include "memory_map.h" #include "memory_map.h"
#include "bootup.h" #include "bootup.h"
#include "cpu_context.h" #include "cpu_context.h"
#include "package2.h" #include "package2.h"
@ -38,7 +37,6 @@
extern void *__start_cold_addr; extern void *__start_cold_addr;
extern size_t __bin_size; extern size_t __bin_size;
static const uint8_t new_device_key_sources[MASTERKEY_NUM_NEW_DEVICE_KEYS][0x10] = { static const uint8_t new_device_key_sources[MASTERKEY_NUM_NEW_DEVICE_KEYS][0x10] = {
{0x8B, 0x4E, 0x1C, 0x22, 0x42, 0x07, 0xC8, 0x73, 0x56, 0x94, 0x08, 0x8B, 0xCC, 0x47, 0x0F, 0x5D}, /* 4.x New Device Key Source. */ {0x8B, 0x4E, 0x1C, 0x22, 0x42, 0x07, 0xC8, 0x73, 0x56, 0x94, 0x08, 0x8B, 0xCC, 0x47, 0x0F, 0x5D}, /* 4.x New Device Key Source. */
{0x6C, 0xEF, 0xC6, 0x27, 0x8B, 0xEC, 0x8A, 0x91, 0x99, 0xAB, 0x24, 0xAC, 0x4F, 0x1C, 0x8F, 0x1C}, /* 5.x New Device Key Source. */ {0x6C, 0xEF, 0xC6, 0x27, 0x8B, 0xEC, 0x8A, 0x91, 0x99, 0xAB, 0x24, 0xAC, 0x4F, 0x1C, 0x8F, 0x1C}, /* 5.x New Device Key Source. */
@ -85,15 +83,15 @@ static void setup_se(void) {
intr_initialize_gic_nonsecure(); intr_initialize_gic_nonsecure();
/* Perform some sanity initialization. */ /* Perform some sanity initialization. */
volatile security_engine_t *p_security_engine = get_security_engine(); volatile tegra_se_t *se = se_get_regs();
p_security_engine->_0x0 &= 0xFFFEFFFF; /* Clear bit 16. */ se->_0x0 &= 0xFFFEFFFF; /* Clear bit 16. */
(void)(SECURITY_ENGINE->FLAGS_REG); (void)(se->FLAGS_REG);
__dsb_sy(); __dsb_sy();
p_security_engine->_0x4 = 0; se->_0x4 = 0;
p_security_engine->AES_KEY_READ_DISABLE_REG = 0; se->AES_KEY_READ_DISABLE_REG = 0;
p_security_engine->RSA_KEY_READ_DISABLE_REG = 0; se->RSA_KEY_READ_DISABLE_REG = 0;
p_security_engine->_0x0 &= 0xFFFFFFFB; se->_0x0 &= 0xFFFFFFFB;
/* Currently unknown what each flag does. */ /* Currently unknown what each flag does. */
for (unsigned int i = 0; i < KEYSLOT_AES_MAX; i++) { for (unsigned int i = 0; i < KEYSLOT_AES_MAX; i++) {
@ -140,7 +138,6 @@ static void setup_se(void) {
/* Generate test vector for our keys. */ /* Generate test vector for our keys. */
se_generate_stored_vector(); se_generate_stored_vector();
} }
static void setup_boot_config(void) { static void setup_boot_config(void) {
@ -169,7 +166,6 @@ static void package2_crypt_ctr(unsigned int master_key_rev, void *dst, size_t ds
se_aes_ctr_crypt(KEYSLOT_SWITCH_PACKAGE2KEY, dst, dst_size, src, src_size, ctr, ctr_size); se_aes_ctr_crypt(KEYSLOT_SWITCH_PACKAGE2KEY, dst, dst_size, src, src_size, ctr, ctr_size);
} }
static void verify_header_signature(package2_header_t *header) { static void verify_header_signature(package2_header_t *header) {
const uint8_t *modulus; const uint8_t *modulus;
@ -506,7 +502,8 @@ void load_package2(coldboot_crt0_reloc_list_t *reloc_list) {
randomcache_init(); randomcache_init();
/* memclear the initial copy of Exosphere running in IRAM (relocated to TZRAM by earlier code). */ /* memclear the initial copy of Exosphere running in IRAM (relocated to TZRAM by earlier code). */
//memset((void *)reloc_list->reloc_base, 0, reloc_list->loaded_bin_size); /* memset((void *)reloc_list->reloc_base, 0, reloc_list->loaded_bin_size); */
/* Let NX Bootloader know that we're running. */ /* Let NX Bootloader know that we're running. */
MAILBOX_NX_BOOTLOADER_IS_SECMON_AWAKE = 1; MAILBOX_NX_BOOTLOADER_IS_SECMON_AWAKE = 1;


@ -36,9 +36,8 @@ static unsigned int g_se_exp_sizes[KEYSLOT_RSA_MAX];
static bool g_se_generated_vector = false; static bool g_se_generated_vector = false;
static uint8_t g_se_stored_test_vector[0x10]; static uint8_t g_se_stored_test_vector[0x10];
/* Initialize a SE linked list. */ /* Initialize a SE linked list. */
void ll_init(se_ll_t *ll, void *buffer, size_t size) { void ll_init(volatile se_ll_t *ll, void *buffer, size_t size) {
ll->num_entries = 0; /* 1 Entry. */ ll->num_entries = 0; /* 1 Entry. */
if (buffer != NULL) { if (buffer != NULL) {
@ -62,7 +61,7 @@ void set_security_engine_callback(unsigned int (*callback)(void)) {
/* Fires on Security Engine operation completion. */ /* Fires on Security Engine operation completion. */
void se_operation_completed(void) { void se_operation_completed(void) {
SECURITY_ENGINE->INT_ENABLE_REG = 0; se_get_regs()->INT_ENABLE_REG = 0;
if (g_se_callback != NULL) { if (g_se_callback != NULL) {
g_se_callback(); g_se_callback();
g_se_callback = NULL; g_se_callback = NULL;
@ -70,13 +69,14 @@ void se_operation_completed(void) {
} }
void se_check_error_status_reg(void) { void se_check_error_status_reg(void) {
if (SECURITY_ENGINE->ERR_STATUS_REG) { if (se_get_regs()->ERR_STATUS_REG) {
generic_panic(); generic_panic();
} }
} }
void se_check_for_error(void) { void se_check_for_error(void) {
if (SECURITY_ENGINE->INT_STATUS_REG & 0x10000 || SECURITY_ENGINE->FLAGS_REG & 3 || SECURITY_ENGINE->ERR_STATUS_REG) { volatile tegra_se_t *se = se_get_regs();
if (se->INT_STATUS_REG & 0x10000 || se->FLAGS_REG & 3 || se->ERR_STATUS_REG) {
generic_panic(); generic_panic();
} }
} }
@ -86,12 +86,11 @@ void se_trigger_interrupt(void) {
} }
void se_verify_flags_cleared(void) { void se_verify_flags_cleared(void) {
if (SECURITY_ENGINE->FLAGS_REG & 3) { if (se_get_regs()->FLAGS_REG & 3) {
generic_panic(); generic_panic();
} }
} }
void se_generate_test_vector(void *vector) { void se_generate_test_vector(void *vector) {
/* TODO: Implement real test vector generation. */ /* TODO: Implement real test vector generation. */
memset(vector, 0, 0x10); memset(vector, 0, 0x10);
@ -122,23 +121,27 @@ void se_generate_stored_vector(void) {
/* Set the flags for an AES keyslot. */ /* Set the flags for an AES keyslot. */
void set_aes_keyslot_flags(unsigned int keyslot, unsigned int flags) { void set_aes_keyslot_flags(unsigned int keyslot, unsigned int flags) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX) { if (keyslot >= KEYSLOT_AES_MAX) {
generic_panic(); generic_panic();
} }
/* Misc flags. */ /* Misc flags. */
if (flags & ~0x80) { if (flags & ~0x80) {
SECURITY_ENGINE->AES_KEYSLOT_FLAGS[keyslot] = ~flags; se->AES_KEYSLOT_FLAGS[keyslot] = ~flags;
} }
/* Disable keyslot reads. */ /* Disable keyslot reads. */
if (flags & 0x80) { if (flags & 0x80) {
SECURITY_ENGINE->AES_KEY_READ_DISABLE_REG &= ~(1 << keyslot); se->AES_KEY_READ_DISABLE_REG &= ~(1 << keyslot);
} }
} }
/* Set the flags for an RSA keyslot. */ /* Set the flags for an RSA keyslot. */
void set_rsa_keyslot_flags(unsigned int keyslot, unsigned int flags) { void set_rsa_keyslot_flags(unsigned int keyslot, unsigned int flags) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_RSA_MAX) { if (keyslot >= KEYSLOT_RSA_MAX) {
generic_panic(); generic_panic();
} }
@ -146,28 +149,32 @@ void set_rsa_keyslot_flags(unsigned int keyslot, unsigned int flags) {
/* Misc flags. */ /* Misc flags. */
if (flags & ~0x80) { if (flags & ~0x80) {
/* TODO: Why are flags assigned this way? */ /* TODO: Why are flags assigned this way? */
SECURITY_ENGINE->RSA_KEYSLOT_FLAGS[keyslot] = (((flags >> 4) & 4) | (flags & 3)) ^ 7; se->RSA_KEYSLOT_FLAGS[keyslot] = (((flags >> 4) & 4) | (flags & 3)) ^ 7;
} }
/* Disable keyslot reads. */ /* Disable keyslot reads. */
if (flags & 0x80) { if (flags & 0x80) {
SECURITY_ENGINE->RSA_KEY_READ_DISABLE_REG &= ~(1 << keyslot); se->RSA_KEY_READ_DISABLE_REG &= ~(1 << keyslot);
} }
} }
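The expression carrying the "Why are flags assigned this way?" TODO above is just a bit repack: bits 0 and 1 of flags are kept, bit 6 is moved down to bit 2, and the low three bits of the result are inverted before landing in RSA_KEYSLOT_FLAGS. A standalone worked example of the same expression:

    #include <stdint.h>
    #include <stdio.h>

    /* The exact value the driver writes to RSA_KEYSLOT_FLAGS[keyslot]. */
    static uint32_t rsa_keyslot_flags_value(uint32_t flags) {
        return (((flags >> 4) & 4) | (flags & 3)) ^ 7;
    }

    int main(void) {
        printf("0x%X\n", rsa_keyslot_flags_value(0x43)); /* bits 0, 1 and 6 set -> 0x0 */
        printf("0x%X\n", rsa_keyslot_flags_value(0x01)); /* only bit 0 set      -> 0x6 */
        return 0;
    }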
void clear_aes_keyslot(unsigned int keyslot) { void clear_aes_keyslot(unsigned int keyslot) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX) { if (keyslot >= KEYSLOT_AES_MAX) {
generic_panic(); generic_panic();
} }
/* Zero out the whole keyslot and IV. */ /* Zero out the whole keyslot and IV. */
for (unsigned int i = 0; i < 0x10; i++) { for (unsigned int i = 0; i < 0x10; i++) {
SECURITY_ENGINE->AES_KEYTABLE_ADDR = (keyslot << 4) | i; se->AES_KEYTABLE_ADDR = (keyslot << 4) | i;
SECURITY_ENGINE->AES_KEYTABLE_DATA = 0; se->AES_KEYTABLE_DATA = 0;
} }
} }
void clear_rsa_keyslot(unsigned int keyslot) { void clear_rsa_keyslot(unsigned int keyslot) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_RSA_MAX) { if (keyslot >= KEYSLOT_RSA_MAX) {
generic_panic(); generic_panic();
} }
@ -175,40 +182,44 @@ void clear_rsa_keyslot(unsigned int keyslot) {
/* Zero out the whole keyslot. */ /* Zero out the whole keyslot. */
for (unsigned int i = 0; i < 0x40; i++) { for (unsigned int i = 0; i < 0x40; i++) {
/* Select Keyslot Modulus[i] */ /* Select Keyslot Modulus[i] */
SECURITY_ENGINE->RSA_KEYTABLE_ADDR = (keyslot << 7) | i | 0x40; se->RSA_KEYTABLE_ADDR = (keyslot << 7) | i | 0x40;
SECURITY_ENGINE->RSA_KEYTABLE_DATA = 0; se->RSA_KEYTABLE_DATA = 0;
} }
for (unsigned int i = 0; i < 0x40; i++) { for (unsigned int i = 0; i < 0x40; i++) {
/* Select Keyslot Expontent[i] */ /* Select Keyslot Expontent[i] */
SECURITY_ENGINE->RSA_KEYTABLE_ADDR = (keyslot << 7) | i; se->RSA_KEYTABLE_ADDR = (keyslot << 7) | i;
SECURITY_ENGINE->RSA_KEYTABLE_DATA = 0; se->RSA_KEYTABLE_DATA = 0;
} }
} }
void set_aes_keyslot(unsigned int keyslot, const void *key, size_t key_size) { void set_aes_keyslot(unsigned int keyslot, const void *key, size_t key_size) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX || key_size > KEYSIZE_AES_MAX) { if (keyslot >= KEYSLOT_AES_MAX || key_size > KEYSIZE_AES_MAX) {
generic_panic(); generic_panic();
} }
for (size_t i = 0; i < (key_size >> 2); i++) { for (size_t i = 0; i < (key_size >> 2); i++) {
SECURITY_ENGINE->AES_KEYTABLE_ADDR = (keyslot << 4) | i; se->AES_KEYTABLE_ADDR = (keyslot << 4) | i;
SECURITY_ENGINE->AES_KEYTABLE_DATA = read32le(key, 4 * i); se->AES_KEYTABLE_DATA = read32le(key, 4 * i);
} }
} }
void set_rsa_keyslot(unsigned int keyslot, const void *modulus, size_t modulus_size, const void *exponent, size_t exp_size) { void set_rsa_keyslot(unsigned int keyslot, const void *modulus, size_t modulus_size, const void *exponent, size_t exp_size) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_RSA_MAX || modulus_size > KEYSIZE_RSA_MAX || exp_size > KEYSIZE_RSA_MAX) { if (keyslot >= KEYSLOT_RSA_MAX || modulus_size > KEYSIZE_RSA_MAX || exp_size > KEYSIZE_RSA_MAX) {
generic_panic(); generic_panic();
} }
for (size_t i = 0; i < (modulus_size >> 2); i++) { for (size_t i = 0; i < (modulus_size >> 2); i++) {
SECURITY_ENGINE->RSA_KEYTABLE_ADDR = (keyslot << 7) | 0x40 | i; se->RSA_KEYTABLE_ADDR = (keyslot << 7) | 0x40 | i;
SECURITY_ENGINE->RSA_KEYTABLE_DATA = read32be(modulus, (4 * (modulus_size >> 2)) - (4 * i) - 4); se->RSA_KEYTABLE_DATA = read32be(modulus, (4 * (modulus_size >> 2)) - (4 * i) - 4);
} }
for (size_t i = 0; i < (exp_size >> 2); i++) { for (size_t i = 0; i < (exp_size >> 2); i++) {
SECURITY_ENGINE->RSA_KEYTABLE_ADDR = (keyslot << 7) | i; se->RSA_KEYTABLE_ADDR = (keyslot << 7) | i;
SECURITY_ENGINE->RSA_KEYTABLE_DATA = read32be(exponent, (4 * (exp_size >> 2)) - (4 * i) - 4); se->RSA_KEYTABLE_DATA = read32be(exponent, (4 * (exp_size >> 2)) - (4 * i) - 4);
} }
g_se_modulus_sizes[keyslot] = modulus_size; g_se_modulus_sizes[keyslot] = modulus_size;
@ -216,48 +227,56 @@ void set_rsa_keyslot(unsigned int keyslot, const void *modulus, size_t modulus_
} }
void set_aes_keyslot_iv(unsigned int keyslot, const void *iv, size_t iv_size) { void set_aes_keyslot_iv(unsigned int keyslot, const void *iv, size_t iv_size) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX || iv_size > 0x10) { if (keyslot >= KEYSLOT_AES_MAX || iv_size > 0x10) {
generic_panic(); generic_panic();
} }
for (size_t i = 0; i < (iv_size >> 2); i++) { for (size_t i = 0; i < (iv_size >> 2); i++) {
SECURITY_ENGINE->AES_KEYTABLE_ADDR = (keyslot << 4) | 8 | i; se->AES_KEYTABLE_ADDR = (keyslot << 4) | 8 | i;
SECURITY_ENGINE->AES_KEYTABLE_DATA = read32le(iv, 4 * i); se->AES_KEYTABLE_DATA = read32le(iv, 4 * i);
} }
} }
void clear_aes_keyslot_iv(unsigned int keyslot) { void clear_aes_keyslot_iv(unsigned int keyslot) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX) { if (keyslot >= KEYSLOT_AES_MAX) {
generic_panic(); generic_panic();
} }
for (size_t i = 0; i < (0x10 >> 2); i++) { for (size_t i = 0; i < (0x10 >> 2); i++) {
SECURITY_ENGINE->AES_KEYTABLE_ADDR = (keyslot << 4) | 8 | i; se->AES_KEYTABLE_ADDR = (keyslot << 4) | 8 | i;
SECURITY_ENGINE->AES_KEYTABLE_DATA = 0; se->AES_KEYTABLE_DATA = 0;
} }
} }
void set_se_ctr(const void *ctr) { void set_se_ctr(const void *ctr) {
for (unsigned int i = 0; i < 4; i++) { for (unsigned int i = 0; i < 4; i++) {
SECURITY_ENGINE->CRYPTO_CTR_REG[i] = read32le(ctr, i * 4); se_get_regs()->CRYPTO_CTR_REG[i] = read32le(ctr, i * 4);
} }
} }
void decrypt_data_into_keyslot(unsigned int keyslot_dst, unsigned int keyslot_src, const void *wrapped_key, size_t wrapped_key_size) { void decrypt_data_into_keyslot(unsigned int keyslot_dst, unsigned int keyslot_src, const void *wrapped_key, size_t wrapped_key_size) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot_dst >= KEYSLOT_AES_MAX || keyslot_src >= KEYSIZE_AES_MAX || wrapped_key_size > KEYSIZE_AES_MAX) { if (keyslot_dst >= KEYSLOT_AES_MAX || keyslot_src >= KEYSIZE_AES_MAX || wrapped_key_size > KEYSIZE_AES_MAX) {
generic_panic(); generic_panic();
} }
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_DEC | DST_KEYTAB); se->CONFIG_REG = (ALG_AES_DEC | DST_KEYTAB);
SECURITY_ENGINE->CRYPTO_REG = keyslot_src << 24; se->CRYPTO_REG = keyslot_src << 24;
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
SECURITY_ENGINE->CRYPTO_KEYTABLE_DST_REG = keyslot_dst << 8; se->CRYPTO_KEYTABLE_DST_REG = keyslot_dst << 8;
flush_dcache_range(wrapped_key, (const uint8_t *)wrapped_key + wrapped_key_size); flush_dcache_range(wrapped_key, (const uint8_t *)wrapped_key + wrapped_key_size);
trigger_se_blocking_op(OP_START, NULL, 0, wrapped_key, wrapped_key_size); trigger_se_blocking_op(OP_START, NULL, 0, wrapped_key, wrapped_key_size);
} }
void se_aes_crypt_insecure_internal(unsigned int keyslot, uint32_t out_ll_paddr, uint32_t in_ll_paddr, size_t size, unsigned int crypt_config, bool encrypt, unsigned int (*callback)(void)) { void se_aes_crypt_insecure_internal(unsigned int keyslot, uint32_t out_ll_paddr, uint32_t in_ll_paddr, size_t size, unsigned int crypt_config, bool encrypt, unsigned int (*callback)(void)) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX) { if (keyslot >= KEYSLOT_AES_MAX) {
generic_panic(); generic_panic();
} }
@ -268,34 +287,34 @@ void se_aes_crypt_insecure_internal(unsigned int keyslot, uint32_t out_ll_paddr,
/* Setup Config register. */ /* Setup Config register. */
if (encrypt) { if (encrypt) {
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY); se->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY);
} else { } else {
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_DEC | DST_MEMORY); se->CONFIG_REG = (ALG_AES_DEC | DST_MEMORY);
} }
/* Setup Crypto register. */ /* Setup Crypto register. */
SECURITY_ENGINE->CRYPTO_REG = crypt_config | (keyslot << 24) | (encrypt << 8); se->CRYPTO_REG = crypt_config | (keyslot << 24) | (encrypt << 8);
/* Mark this encryption as insecure -- this makes the SE not a secure busmaster. */ /* Mark this encryption as insecure -- this makes the SE not a secure busmaster. */
SECURITY_ENGINE->CRYPTO_REG |= 0x80000000; se->CRYPTO_REG |= 0x80000000;
/* Appropriate number of blocks. */ /* Appropriate number of blocks. */
SECURITY_ENGINE->BLOCK_COUNT_REG = (size >> 4) - 1; se->BLOCK_COUNT_REG = (size >> 4) - 1;
/* Set the callback, for after the async operation. */ /* Set the callback, for after the async operation. */
set_security_engine_callback(callback); set_security_engine_callback(callback);
/* Enable SE Interrupt firing for async op. */ /* Enable SE Interrupt firing for async op. */
SECURITY_ENGINE->INT_ENABLE_REG = 0x10; se->INT_ENABLE_REG = 0x10;
/* Setup Input/Output lists */ /* Setup Input/Output lists */
SECURITY_ENGINE->IN_LL_ADDR_REG = in_ll_paddr; se->IN_LL_ADDR_REG = in_ll_paddr;
SECURITY_ENGINE->OUT_LL_ADDR_REG = out_ll_paddr; se->OUT_LL_ADDR_REG = out_ll_paddr;
/* Set registers for operation. */ /* Set registers for operation. */
SECURITY_ENGINE->ERR_STATUS_REG = SECURITY_ENGINE->ERR_STATUS_REG; se->ERR_STATUS_REG = se->ERR_STATUS_REG;
SECURITY_ENGINE->INT_STATUS_REG = SECURITY_ENGINE->INT_STATUS_REG; se->INT_STATUS_REG = se->INT_STATUS_REG;
SECURITY_ENGINE->OPERATION_REG = 1; se->OPERATION_REG = 1;
/* Ensure writes go through. */ /* Ensure writes go through. */
__dsb_ish(); __dsb_ish();
@ -303,7 +322,7 @@ void se_aes_crypt_insecure_internal(unsigned int keyslot, uint32_t out_ll_paddr,
void se_aes_ctr_crypt_insecure(unsigned int keyslot, uint32_t out_ll_paddr, uint32_t in_ll_paddr, size_t size, const void *ctr, unsigned int (*callback)(void)) { void se_aes_ctr_crypt_insecure(unsigned int keyslot, uint32_t out_ll_paddr, uint32_t in_ll_paddr, size_t size, const void *ctr, unsigned int (*callback)(void)) {
/* Unknown what this write does, but official code writes it for CTR mode. */ /* Unknown what this write does, but official code writes it for CTR mode. */
SECURITY_ENGINE->_0x80C = 1; se_get_regs()->SPARE_0 = 1;
set_se_ctr(ctr); set_se_ctr(ctr);
se_aes_crypt_insecure_internal(keyslot, out_ll_paddr, in_ll_paddr, size, 0x81E, true, callback); se_aes_crypt_insecure_internal(keyslot, out_ll_paddr, in_ll_paddr, size, 0x81E, true, callback);
} }
@ -318,8 +337,8 @@ void se_aes_cbc_decrypt_insecure(unsigned int keyslot, uint32_t out_ll_paddr, ui
se_aes_crypt_insecure_internal(keyslot, out_ll_paddr, in_ll_paddr, size, 0x66, false, callback); se_aes_crypt_insecure_internal(keyslot, out_ll_paddr, in_ll_paddr, size, 0x66, false, callback);
} }
void se_exp_mod(unsigned int keyslot, void *buf, size_t size, unsigned int (*callback)(void)) { void se_exp_mod(unsigned int keyslot, void *buf, size_t size, unsigned int (*callback)(void)) {
volatile tegra_se_t *se = se_get_regs();
uint8_t stack_buf[KEYSIZE_RSA_MAX]; uint8_t stack_buf[KEYSIZE_RSA_MAX];
if (keyslot >= KEYSLOT_RSA_MAX || size > KEYSIZE_RSA_MAX) { if (keyslot >= KEYSLOT_RSA_MAX || size > KEYSIZE_RSA_MAX) {
@ -331,24 +350,24 @@ void se_exp_mod(unsigned int keyslot, void *buf, size_t size, unsigned int (*cal
stack_buf[i] = *((uint8_t *)buf + size - i - 1); stack_buf[i] = *((uint8_t *)buf + size - i - 1);
} }
se->CONFIG_REG = (ALG_RSA | DST_RSAREG);
SECURITY_ENGINE->CONFIG_REG = (ALG_RSA | DST_RSAREG); se->RSA_CONFIG = keyslot << 24;
SECURITY_ENGINE->RSA_CONFIG = keyslot << 24; se->RSA_KEY_SIZE_REG = (g_se_modulus_sizes[keyslot] >> 6) - 1;
SECURITY_ENGINE->RSA_KEY_SIZE_REG = (g_se_modulus_sizes[keyslot] >> 6) - 1; se->RSA_EXP_SIZE_REG = g_se_exp_sizes[keyslot] >> 2;
SECURITY_ENGINE->RSA_EXP_SIZE_REG = g_se_exp_sizes[keyslot] >> 2;
set_security_engine_callback(callback); set_security_engine_callback(callback);
/* Enable SE Interrupt firing for async op. */ /* Enable SE Interrupt firing for async op. */
SECURITY_ENGINE->INT_ENABLE_REG = 0x10; se->INT_ENABLE_REG = 0x10;
flush_dcache_range(stack_buf, stack_buf + KEYSIZE_RSA_MAX); flush_dcache_range(stack_buf, stack_buf + KEYSIZE_RSA_MAX);
trigger_se_rsa_op(stack_buf, size); trigger_se_rsa_op(stack_buf, size);
while (!(SECURITY_ENGINE->INT_STATUS_REG & 2)) { /* Wait a while */ } while (!(se->INT_STATUS_REG & 2)) { /* Wait a while */ }
} }
void se_synchronous_exp_mod(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size) { void se_synchronous_exp_mod(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size) {
volatile tegra_se_t *se = se_get_regs();
uint8_t stack_buf[KEYSIZE_RSA_MAX]; uint8_t stack_buf[KEYSIZE_RSA_MAX];
if (keyslot >= KEYSLOT_RSA_MAX || src_size > KEYSIZE_RSA_MAX || dst_size > KEYSIZE_RSA_MAX) { if (keyslot >= KEYSLOT_RSA_MAX || src_size > KEYSIZE_RSA_MAX || dst_size > KEYSIZE_RSA_MAX) {
@ -360,11 +379,10 @@ void se_synchronous_exp_mod(unsigned int keyslot, void *dst, size_t dst_size, co
stack_buf[i] = *((uint8_t *)src + src_size - i - 1); stack_buf[i] = *((uint8_t *)src + src_size - i - 1);
} }
SECURITY_ENGINE->CONFIG_REG = (ALG_RSA | DST_RSAREG); se->CONFIG_REG = (ALG_RSA | DST_RSAREG);
SECURITY_ENGINE->RSA_CONFIG = keyslot << 24; se->RSA_CONFIG = keyslot << 24;
SECURITY_ENGINE->RSA_KEY_SIZE_REG = (g_se_modulus_sizes[keyslot] >> 6) - 1; se->RSA_KEY_SIZE_REG = (g_se_modulus_sizes[keyslot] >> 6) - 1;
SECURITY_ENGINE->RSA_EXP_SIZE_REG = g_se_exp_sizes[keyslot] >> 2; se->RSA_EXP_SIZE_REG = g_se_exp_sizes[keyslot] >> 2;
flush_dcache_range(stack_buf, stack_buf + KEYSIZE_RSA_MAX); flush_dcache_range(stack_buf, stack_buf + KEYSIZE_RSA_MAX);
trigger_se_blocking_op(OP_START, NULL, 0, stack_buf, src_size); trigger_se_blocking_op(OP_START, NULL, 0, stack_buf, src_size);
@ -382,7 +400,7 @@ void se_get_exp_mod_output(void *buf, size_t size) {
/* Copy endian swapped output. */ /* Copy endian swapped output. */
while (num_dwords) { while (num_dwords) {
*p_out = read32be(SECURITY_ENGINE->RSA_OUTPUT, offset); *p_out = read32be(se_get_regs()->RSA_OUTPUT, offset);
offset += 4; offset += 4;
p_out--; p_out--;
num_dwords--; num_dwords--;
@ -447,22 +465,25 @@ bool se_rsa2048_pss_verify(const void *signature, size_t signature_size, const v
void trigger_se_rsa_op(void *buf, size_t size) { void trigger_se_rsa_op(void *buf, size_t size) {
volatile tegra_se_t *se = se_get_regs();
se_ll_t in_ll; se_ll_t in_ll;
ll_init(&in_ll, (void *)buf, size); ll_init(&in_ll, (void *)buf, size);
/* Set the input LL. */ /* Set the input LL. */
SECURITY_ENGINE->IN_LL_ADDR_REG = (uint32_t) get_physical_address(&in_ll); se->IN_LL_ADDR_REG = (uint32_t) get_physical_address(&in_ll);
/* Set registers for operation. */ /* Set registers for operation. */
SECURITY_ENGINE->ERR_STATUS_REG = SECURITY_ENGINE->ERR_STATUS_REG; se->ERR_STATUS_REG = se->ERR_STATUS_REG;
SECURITY_ENGINE->INT_STATUS_REG = SECURITY_ENGINE->INT_STATUS_REG; se->INT_STATUS_REG = se->INT_STATUS_REG;
SECURITY_ENGINE->OPERATION_REG = 1; se->OPERATION_REG = 1;
/* Ensure writes go through. */ /* Ensure writes go through. */
__dsb_ish(); __dsb_ish();
} }
void trigger_se_blocking_op(unsigned int op, void *dst, size_t dst_size, const void *src, size_t src_size) { void trigger_se_blocking_op(unsigned int op, void *dst, size_t dst_size, const void *src, size_t src_size) {
volatile tegra_se_t *se = se_get_regs();
se_ll_t in_ll; se_ll_t in_ll;
se_ll_t out_ll; se_ll_t out_ll;
@ -472,20 +493,19 @@ void trigger_se_blocking_op(unsigned int op, void *dst, size_t dst_size, const v
__dsb_sy(); __dsb_sy();
/* Set the LLs. */ /* Set the LLs. */
SECURITY_ENGINE->IN_LL_ADDR_REG = (uint32_t) get_physical_address(&in_ll); se->IN_LL_ADDR_REG = (uint32_t) get_physical_address(&in_ll);
SECURITY_ENGINE->OUT_LL_ADDR_REG = (uint32_t) get_physical_address(&out_ll); se->OUT_LL_ADDR_REG = (uint32_t) get_physical_address(&out_ll);
/* Set registers for operation. */ /* Set registers for operation. */
SECURITY_ENGINE->ERR_STATUS_REG = SECURITY_ENGINE->ERR_STATUS_REG; se->ERR_STATUS_REG = se->ERR_STATUS_REG;
SECURITY_ENGINE->INT_STATUS_REG = SECURITY_ENGINE->INT_STATUS_REG; se->INT_STATUS_REG = se->INT_STATUS_REG;
SECURITY_ENGINE->OPERATION_REG = op; se->OPERATION_REG = op;
while (!(SECURITY_ENGINE->INT_STATUS_REG & 0x10)) { /* Wait a while */ } while (!(se->INT_STATUS_REG & 0x10)) { /* Wait a while */ }
se_check_for_error(); se_check_for_error();
} }
/* Secure AES Functionality. */ /* Secure AES Functionality. */
void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src, size_t src_size) { void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src, size_t src_size) {
uint8_t block[0x10] = {0}; uint8_t block[0x10] = {0};
@ -501,7 +521,7 @@ void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src,
flush_dcache_range(block, block + sizeof(block)); flush_dcache_range(block, block + sizeof(block));
/* Trigger AES operation. */ /* Trigger AES operation. */
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se_get_regs()->BLOCK_COUNT_REG = 0;
trigger_se_blocking_op(OP_START, block, sizeof(block), block, sizeof(block)); trigger_se_blocking_op(OP_START, block, sizeof(block), block, sizeof(block));
/* Copy output data into dst. */ /* Copy output data into dst. */
@ -512,6 +532,8 @@ void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src,
} }
void se_aes_ctr_crypt(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size, const void *ctr, size_t ctr_size) { void se_aes_ctr_crypt(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size, const void *ctr, size_t ctr_size) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX || ctr_size != 0x10) { if (keyslot >= KEYSLOT_AES_MAX || ctr_size != 0x10) {
generic_panic(); generic_panic();
} }
@ -526,15 +548,15 @@ void se_aes_ctr_crypt(unsigned int keyslot, void *dst, size_t dst_size, const vo
unsigned int num_blocks = src_size >> 4; unsigned int num_blocks = src_size >> 4;
/* Unknown what this write does, but official code writes it for CTR mode. */ /* Unknown what this write does, but official code writes it for CTR mode. */
SECURITY_ENGINE->_0x80C = 1; se->SPARE_0 = 1;
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY); se->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY);
SECURITY_ENGINE->CRYPTO_REG = (keyslot << 24) | 0x91E; se->CRYPTO_REG = (keyslot << 24) | 0x91E;
set_se_ctr(ctr); set_se_ctr(ctr);
/* Handle any aligned blocks. */ /* Handle any aligned blocks. */
size_t aligned_size = (size_t)num_blocks << 4; size_t aligned_size = (size_t)num_blocks << 4;
if (aligned_size) { if (aligned_size) {
SECURITY_ENGINE->BLOCK_COUNT_REG = num_blocks - 1; se->BLOCK_COUNT_REG = num_blocks - 1;
trigger_se_blocking_op(OP_START, dst, dst_size, src, aligned_size); trigger_se_blocking_op(OP_START, dst, dst_size, src, aligned_size);
} }
@ -553,13 +575,15 @@ void se_aes_ctr_crypt(unsigned int keyslot, void *dst, size_t dst_size, const vo
} }
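One recurring detail in these AES hunks: BLOCK_COUNT_REG holds the block count minus one, so a single 16-byte block is programmed as 0 and the bulk CTR pass as num_blocks - 1; whatever falls outside the aligned portion is handled afterwards as a separate single block (further down in se_aes_ctr_crypt, outside the lines shown here). The arithmetic as a standalone check:

    #include <stddef.h>
    #include <stdio.h>

    int main(void) {
        size_t src_size     = 0x35;                    /* example CTR request       */
        size_t num_blocks   = src_size >> 4;           /* 3 full 16-byte blocks     */
        size_t aligned_size = num_blocks << 4;         /* 0x30 bytes in the bulk op */
        size_t tail         = src_size - aligned_size; /* 5 bytes left over         */

        printf("BLOCK_COUNT_REG=%zu aligned=0x%zx tail=%zu\n",
               num_blocks - 1, aligned_size, tail);
        return 0;
    }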
void se_aes_ecb_encrypt_block(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size, unsigned int config_high) { void se_aes_ecb_encrypt_block(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size, unsigned int config_high) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX || dst_size != 0x10 || src_size != 0x10) { if (keyslot >= KEYSLOT_AES_MAX || dst_size != 0x10 || src_size != 0x10) {
generic_panic(); generic_panic();
} }
/* Set configuration high (256-bit vs 128-bit) based on parameter. */ /* Set configuration high (256-bit vs 128-bit) based on parameter. */
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY) | (config_high << 16); se->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY) | (config_high << 16);
SECURITY_ENGINE->CRYPTO_REG = keyslot << 24 | 0x100; se->CRYPTO_REG = keyslot << 24 | 0x100;
flush_dcache_range((uint8_t *)src, (uint8_t *)src + 0x10); flush_dcache_range((uint8_t *)src, (uint8_t *)src + 0x10);
se_perform_aes_block_operation(dst, 0x10, src, 0x10); se_perform_aes_block_operation(dst, 0x10, src, 0x10);
flush_dcache_range((uint8_t *)dst, (uint8_t *)dst + 0x10); flush_dcache_range((uint8_t *)dst, (uint8_t *)dst + 0x10);
@ -576,12 +600,14 @@ void se_aes_256_ecb_encrypt_block(unsigned int keyslot, void *dst, size_t dst_si
void se_aes_ecb_decrypt_block(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size) { void se_aes_ecb_decrypt_block(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX || dst_size != 0x10 || src_size != 0x10) { if (keyslot >= KEYSLOT_AES_MAX || dst_size != 0x10 || src_size != 0x10) {
generic_panic(); generic_panic();
} }
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_DEC | DST_MEMORY); se->CONFIG_REG = (ALG_AES_DEC | DST_MEMORY);
SECURITY_ENGINE->CRYPTO_REG = keyslot << 24; se->CRYPTO_REG = keyslot << 24;
flush_dcache_range((uint8_t *)src, (uint8_t *)src + 0x10); flush_dcache_range((uint8_t *)src, (uint8_t *)src + 0x10);
se_perform_aes_block_operation(dst, 0x10, src, 0x10); se_perform_aes_block_operation(dst, 0x10, src, 0x10);
flush_dcache_range((uint8_t *)dst, (uint8_t *)dst + 0x10); flush_dcache_range((uint8_t *)dst, (uint8_t *)dst + 0x10);
@ -600,6 +626,8 @@ void shift_left_xor_rb(uint8_t *key) {
} }
void se_compute_aes_cmac(unsigned int keyslot, void *cmac, size_t cmac_size, const void *data, size_t data_size, unsigned int config_high) { void se_compute_aes_cmac(unsigned int keyslot, void *cmac, size_t cmac_size, const void *data, size_t data_size, unsigned int config_high) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX) { if (keyslot >= KEYSLOT_AES_MAX) {
generic_panic(); generic_panic();
} }
@ -616,17 +644,16 @@ void se_compute_aes_cmac(unsigned int keyslot, void *cmac, size_t cmac_size, con
shift_left_xor_rb(derived_key); shift_left_xor_rb(derived_key);
} }
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_ENC | DST_HASHREG) | (config_high << 16); se->CONFIG_REG = (ALG_AES_ENC | DST_HASHREG) | (config_high << 16);
SECURITY_ENGINE->CRYPTO_REG = (keyslot << 24) | (0x145); se->CRYPTO_REG = (keyslot << 24) | (0x145);
clear_aes_keyslot_iv(keyslot); clear_aes_keyslot_iv(keyslot);
unsigned int num_blocks = (data_size + 0xF) >> 4; unsigned int num_blocks = (data_size + 0xF) >> 4;
/* Handle aligned blocks. */ /* Handle aligned blocks. */
if (num_blocks > 1) { if (num_blocks > 1) {
SECURITY_ENGINE->BLOCK_COUNT_REG = num_blocks - 2; se->BLOCK_COUNT_REG = num_blocks - 2;
trigger_se_blocking_op(OP_START, NULL, 0, data, data_size); trigger_se_blocking_op(OP_START, NULL, 0, data, data_size);
SECURITY_ENGINE->CRYPTO_REG |= 0x80; se->CRYPTO_REG |= 0x80;
} }
/* Create final block. */ /* Create final block. */
@ -643,13 +670,13 @@ void se_compute_aes_cmac(unsigned int keyslot, void *cmac, size_t cmac_size, con
} }
/* Perform last operation. */ /* Perform last operation. */
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
flush_dcache_range(last_block, last_block + sizeof(last_block)); flush_dcache_range(last_block, last_block + sizeof(last_block));
trigger_se_blocking_op(OP_START, NULL, 0, last_block, sizeof(last_block)); trigger_se_blocking_op(OP_START, NULL, 0, last_block, sizeof(last_block));
/* Copy output CMAC. */ /* Copy output CMAC. */
for (unsigned int i = 0; i < (cmac_size >> 2); i++) { for (unsigned int i = 0; i < (cmac_size >> 2); i++) {
((uint32_t *)cmac)[i] = read32le(SECURITY_ENGINE->HASH_RESULT_REG, i << 2); ((uint32_t *)cmac)[i] = read32le(se->HASH_RESULT_REG, i << 2);
} }
} }
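For context on the CMAC path: shift_left_xor_rb() is the subkey-derivation step of CMAC, and the standard operation there (RFC 4493) is a GF(2^128) doubling: shift the 128-bit value left by one bit and, if the bit shifted out was set, XOR the constant Rb = 0x87 into the last byte. The helper's body is not part of this diff, so the sketch below is the textbook step rather than the project's exact code:

    #include <stdint.h>

    /* Textbook CMAC subkey doubling over a big-endian 16-byte value. */
    static void cmac_shift_left_xor_rb(uint8_t key[0x10]) {
        uint8_t carry = key[0] & 0x80;
        for (int i = 0; i < 0x10; i++) {
            key[i] = (uint8_t)(key[i] << 1);
            if (i + 1 < 0x10 && (key[i + 1] & 0x80)) {
                key[i] |= 1;
            }
        }
        if (carry) {
            key[0x0F] ^= 0x87;
        }
    }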
@ -661,42 +688,48 @@ void se_compute_aes_256_cmac(unsigned int keyslot, void *cmac, size_t cmac_size,
} }
void se_aes_256_cbc_encrypt(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size, const void *iv) { void se_aes_256_cbc_encrypt(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size, const void *iv) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX || src_size < 0x10) { if (keyslot >= KEYSLOT_AES_MAX || src_size < 0x10) {
generic_panic(); generic_panic();
} }
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY) | (0x202 << 16); se->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY) | (0x202 << 16);
SECURITY_ENGINE->CRYPTO_REG = (keyslot << 24) | 0x144; se->CRYPTO_REG = (keyslot << 24) | 0x144;
set_aes_keyslot_iv(keyslot, iv, 0x10); set_aes_keyslot_iv(keyslot, iv, 0x10);
SECURITY_ENGINE->BLOCK_COUNT_REG = (src_size >> 4) - 1; se->BLOCK_COUNT_REG = (src_size >> 4) - 1;
trigger_se_blocking_op(OP_START, dst, dst_size, src, src_size); trigger_se_blocking_op(OP_START, dst, dst_size, src, src_size);
} }
/* SHA256 Implementation. */ /* SHA256 Implementation. */
void se_calculate_sha256(void *dst, const void *src, size_t src_size) { void se_calculate_sha256(void *dst, const void *src, size_t src_size) {
volatile tegra_se_t *se = se_get_regs();
/* Setup config for SHA256, size = BITS(src_size) */ /* Setup config for SHA256, size = BITS(src_size) */
SECURITY_ENGINE->CONFIG_REG = (ENCMODE_SHA256 | ALG_SHA | DST_HASHREG); se->CONFIG_REG = (ENCMODE_SHA256 | ALG_SHA | DST_HASHREG);
SECURITY_ENGINE->SHA_CONFIG_REG = 1; se->SHA_CONFIG_REG = 1;
SECURITY_ENGINE->SHA_MSG_LENGTH_REG = (uint32_t)(src_size << 3); se->SHA_MSG_LENGTH_REG = (uint32_t)(src_size << 3);
SECURITY_ENGINE->_0x208 = 0; se->_0x208 = 0;
SECURITY_ENGINE->_0x20C = 0; se->_0x20C = 0;
SECURITY_ENGINE->_0x210 = 0; se->_0x210 = 0;
SECURITY_ENGINE->SHA_MSG_LEFT_REG = (uint32_t)(src_size << 3); se->SHA_MSG_LEFT_REG = (uint32_t)(src_size << 3);
SECURITY_ENGINE->_0x218 = 0; se->_0x218 = 0;
SECURITY_ENGINE->_0x21C = 0; se->_0x21C = 0;
SECURITY_ENGINE->_0x220 = 0; se->_0x220 = 0;
/* Trigger the operation. */ /* Trigger the operation. */
trigger_se_blocking_op(OP_START, NULL, 0, src, src_size); trigger_se_blocking_op(OP_START, NULL, 0, src, src_size);
/* Copy output hash. */ /* Copy output hash. */
for (unsigned int i = 0; i < (0x20 >> 2); i++) { for (unsigned int i = 0; i < (0x20 >> 2); i++) {
((uint32_t *)dst)[i] = read32be(SECURITY_ENGINE->HASH_RESULT_REG, i << 2); ((uint32_t *)dst)[i] = read32be(se->HASH_RESULT_REG, i << 2);
} }
} }
/* RNG API */ /* RNG API */
void se_initialize_rng(unsigned int keyslot) { void se_initialize_rng(unsigned int keyslot) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX) { if (keyslot >= KEYSLOT_AES_MAX) {
generic_panic(); generic_panic();
} }
@ -705,74 +738,80 @@ void se_initialize_rng(unsigned int keyslot) {
/* This will be discarded, when done. */ /* This will be discarded, when done. */
uint8_t output_buf[0x10]; uint8_t output_buf[0x10];
SECURITY_ENGINE->RNG_SRC_CONFIG_REG = 3; /* Entropy enable + Entropy lock enable */ se->RNG_SRC_CONFIG_REG = 3; /* Entropy enable + Entropy lock enable */
SECURITY_ENGINE->RNG_RESEED_INTERVAL_REG = 70001; se->RNG_RESEED_INTERVAL_REG = 70001;
SECURITY_ENGINE->CONFIG_REG = (ALG_RNG | DST_MEMORY); se->CONFIG_REG = (ALG_RNG | DST_MEMORY);
SECURITY_ENGINE->CRYPTO_REG = (keyslot << 24) | 0x108; se->CRYPTO_REG = (keyslot << 24) | 0x108;
SECURITY_ENGINE->RNG_CONFIG_REG = 5; se->RNG_CONFIG_REG = 5;
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
trigger_se_blocking_op(OP_START, output_buf, 0x10, NULL, 0); trigger_se_blocking_op(OP_START, output_buf, 0x10, NULL, 0);
} }
void se_generate_random(unsigned int keyslot, void *dst, size_t size) { void se_generate_random(unsigned int keyslot, void *dst, size_t size) {
volatile tegra_se_t *se = se_get_regs();
if (keyslot >= KEYSLOT_AES_MAX) { if (keyslot >= KEYSLOT_AES_MAX) {
generic_panic(); generic_panic();
} }
uint32_t num_blocks = size >> 4; uint32_t num_blocks = size >> 4;
size_t aligned_size = num_blocks << 4; size_t aligned_size = num_blocks << 4;
SECURITY_ENGINE->CONFIG_REG = (ALG_RNG | DST_MEMORY); se->CONFIG_REG = (ALG_RNG | DST_MEMORY);
SECURITY_ENGINE->CRYPTO_REG = (keyslot << 24) | 0x108; se->CRYPTO_REG = (keyslot << 24) | 0x108;
SECURITY_ENGINE->RNG_CONFIG_REG = 4; se->RNG_CONFIG_REG = 4;
if (num_blocks >= 1) { if (num_blocks >= 1) {
SECURITY_ENGINE->BLOCK_COUNT_REG = num_blocks - 1; se->BLOCK_COUNT_REG = num_blocks - 1;
trigger_se_blocking_op(OP_START, dst, aligned_size, NULL, 0); trigger_se_blocking_op(OP_START, dst, aligned_size, NULL, 0);
} }
if (size > aligned_size) { if (size > aligned_size) {
se_perform_aes_block_operation(dst + aligned_size, size - aligned_size, NULL, 0); se_perform_aes_block_operation(dst + aligned_size, size - aligned_size, NULL, 0);
} }
} }
/* SE context save API. */ /* SE context save API. */
void se_set_in_context_save_mode(bool is_context_save_mode) { void se_set_in_context_save_mode(bool is_context_save_mode) {
uint32_t val = SECURITY_ENGINE->_0x0; volatile tegra_se_t *se = se_get_regs();
uint32_t val = se->_0x0;
if (is_context_save_mode) { if (is_context_save_mode) {
val |= 0x10000; val |= 0x10000;
} else { } else {
val &= 0xFFFEFFFF; val &= 0xFFFEFFFF;
} }
SECURITY_ENGINE->_0x0 = val; se->_0x0 = val;
/* Perform a useless read from flags reg. */ /* Perform a useless read from flags reg. */
(void)(SECURITY_ENGINE->FLAGS_REG); (void)(se->FLAGS_REG);
} }
void se_generate_random_key(unsigned int dst_keyslot, unsigned int rng_keyslot) { void se_generate_random_key(unsigned int dst_keyslot, unsigned int rng_keyslot) {
volatile tegra_se_t *se = se_get_regs();
if (dst_keyslot >= KEYSLOT_AES_MAX || rng_keyslot >= KEYSLOT_AES_MAX) { if (dst_keyslot >= KEYSLOT_AES_MAX || rng_keyslot >= KEYSLOT_AES_MAX) {
generic_panic(); generic_panic();
} }
/* Setup Config. */ /* Setup Config. */
SECURITY_ENGINE->CONFIG_REG = (ALG_RNG | DST_KEYTAB); se->CONFIG_REG = (ALG_RNG | DST_KEYTAB);
SECURITY_ENGINE->CRYPTO_REG = (rng_keyslot << 24) | 0x108; se->CRYPTO_REG = (rng_keyslot << 24) | 0x108;
SECURITY_ENGINE->RNG_CONFIG_REG = 4; se->RNG_CONFIG_REG = 4;
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
/* Generate low part of key. */ /* Generate low part of key. */
SECURITY_ENGINE->CRYPTO_KEYTABLE_DST_REG = (dst_keyslot << 8); se->CRYPTO_KEYTABLE_DST_REG = (dst_keyslot << 8);
trigger_se_blocking_op(OP_START, NULL, 0, NULL, 0); trigger_se_blocking_op(OP_START, NULL, 0, NULL, 0);
/* Generate high part of key. */ /* Generate high part of key. */
SECURITY_ENGINE->CRYPTO_KEYTABLE_DST_REG = (dst_keyslot << 8) | 1; se->CRYPTO_KEYTABLE_DST_REG = (dst_keyslot << 8) | 1;
trigger_se_blocking_op(OP_START, NULL, 0, NULL, 0); trigger_se_blocking_op(OP_START, NULL, 0, NULL, 0);
} }
void se_generate_srk(unsigned int srkgen_keyslot) { void se_generate_srk(unsigned int srkgen_keyslot) {
SECURITY_ENGINE->CONFIG_REG = (ALG_RNG | DST_SRK); volatile tegra_se_t *se = se_get_regs();
SECURITY_ENGINE->CRYPTO_REG = (srkgen_keyslot << 24) | 0x108;
SECURITY_ENGINE->RNG_CONFIG_REG = 6; se->CONFIG_REG = (ALG_RNG | DST_SRK);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->CRYPTO_REG = (srkgen_keyslot << 24) | 0x108;
se->RNG_CONFIG_REG = 6;
se->BLOCK_COUNT_REG = 0;
trigger_se_blocking_op(OP_START, NULL, 0, NULL, 0); trigger_se_blocking_op(OP_START, NULL, 0, NULL, 0);
} }
@ -796,6 +835,7 @@ void se_encrypt_with_srk(void *dst, size_t dst_size, const void *src, size_t src
} }
void se_save_context(unsigned int srkgen_keyslot, unsigned int rng_keyslot, void *dst) { void se_save_context(unsigned int srkgen_keyslot, unsigned int rng_keyslot, void *dst) {
volatile tegra_se_t *se = se_get_regs();
uint8_t _work_buf[0x80]; uint8_t _work_buf[0x80];
uint8_t *work_buf = (uint8_t *)(((uintptr_t)_work_buf + 0x7F) & ~0x3F); uint8_t *work_buf = (uint8_t *)(((uintptr_t)_work_buf + 0x7F) & ~0x3F);
@ -808,39 +848,39 @@ void se_save_context(unsigned int srkgen_keyslot, unsigned int rng_keyslot, void
flush_dcache_range(work_buf, work_buf + 0x10); flush_dcache_range(work_buf, work_buf + 0x10);
/* Save random initial block. */ /* Save random initial block. */
SECURITY_ENGINE->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY); se->CONFIG_REG = (ALG_AES_ENC | DST_MEMORY);
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_MEM); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_MEM);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(dst, 0x10, work_buf, 0x10); se_encrypt_with_srk(dst, 0x10, work_buf, 0x10);
/* Save Sticky Bits. */ /* Save Sticky Bits. */
for (unsigned int i = 0; i < 0x2; i++) { for (unsigned int i = 0; i < 0x2; i++) {
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_STICKY_BITS) | (i << CTX_SAVE_STICKY_BIT_INDEX_SHIFT); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_STICKY_BITS) | (i << CTX_SAVE_STICKY_BIT_INDEX_SHIFT);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(dst + 0x10 + (i * 0x10), 0x10, NULL, 0); se_encrypt_with_srk(dst + 0x10 + (i * 0x10), 0x10, NULL, 0);
} }
/* Save AES Key Table. */ /* Save AES Key Table. */
for (unsigned int i = 0; i < KEYSLOT_AES_MAX; i++) { for (unsigned int i = 0; i < KEYSLOT_AES_MAX; i++) {
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_AES) | (i << CTX_SAVE_KEY_INDEX_SHIFT) | (CTX_SAVE_KEY_LOW_BITS); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_AES) | (i << CTX_SAVE_KEY_INDEX_SHIFT) | (CTX_SAVE_KEY_LOW_BITS);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(dst + 0x30 + (i * 0x20), 0x10, NULL, 0); se_encrypt_with_srk(dst + 0x30 + (i * 0x20), 0x10, NULL, 0);
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_AES) | (i << CTX_SAVE_KEY_INDEX_SHIFT) | (CTX_SAVE_KEY_HIGH_BITS); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_AES) | (i << CTX_SAVE_KEY_INDEX_SHIFT) | (CTX_SAVE_KEY_HIGH_BITS);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(dst + 0x40 + (i * 0x20), 0x10, NULL, 0); se_encrypt_with_srk(dst + 0x40 + (i * 0x20), 0x10, NULL, 0);
} }
/* Save AES Original IVs. */ /* Save AES Original IVs. */
for (unsigned int i = 0; i < KEYSLOT_AES_MAX; i++) { for (unsigned int i = 0; i < KEYSLOT_AES_MAX; i++) {
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_AES) | (i << CTX_SAVE_KEY_INDEX_SHIFT) | (CTX_SAVE_KEY_ORIGINAL_IV); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_AES) | (i << CTX_SAVE_KEY_INDEX_SHIFT) | (CTX_SAVE_KEY_ORIGINAL_IV);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(dst + 0x230 + (i * 0x10), 0x10, NULL, 0); se_encrypt_with_srk(dst + 0x230 + (i * 0x10), 0x10, NULL, 0);
} }
/* Save AES Updated IVs */ /* Save AES Updated IVs */
for (unsigned int i = 0; i < KEYSLOT_AES_MAX; i++) { for (unsigned int i = 0; i < KEYSLOT_AES_MAX; i++) {
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_AES) | (i << CTX_SAVE_KEY_INDEX_SHIFT) | (CTX_SAVE_KEY_UPDATED_IV); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_AES) | (i << CTX_SAVE_KEY_INDEX_SHIFT) | (CTX_SAVE_KEY_UPDATED_IV);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(dst + 0x330 + (i * 0x10), 0x10, NULL, 0); se_encrypt_with_srk(dst + 0x330 + (i * 0x10), 0x10, NULL, 0);
} }
@ -849,8 +889,8 @@ void se_save_context(unsigned int srkgen_keyslot, unsigned int rng_keyslot, void
for (unsigned int rsa_key = 0; rsa_key < KEYSLOT_RSA_MAX; rsa_key++) { for (unsigned int rsa_key = 0; rsa_key < KEYSLOT_RSA_MAX; rsa_key++) {
for (unsigned int mod_exp = 0; mod_exp < 2; mod_exp++) { for (unsigned int mod_exp = 0; mod_exp < 2; mod_exp++) {
for (unsigned int sub_block = 0; sub_block < 0x10; sub_block++) { for (unsigned int sub_block = 0; sub_block < 0x10; sub_block++) {
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_RSA) | ((2 * rsa_key + (1 - mod_exp)) << CTX_SAVE_RSA_KEY_INDEX_SHIFT) | (sub_block << CTX_SAVE_RSA_KEY_BLOCK_INDEX_SHIFT); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_KEYTABLE_RSA) | ((2 * rsa_key + (1 - mod_exp)) << CTX_SAVE_RSA_KEY_INDEX_SHIFT) | (sub_block << CTX_SAVE_RSA_KEY_BLOCK_INDEX_SHIFT);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(rsa_ctx_out, 0x10, NULL, 0); se_encrypt_with_srk(rsa_ctx_out, 0x10, NULL, 0);
rsa_ctx_out += 0x10; rsa_ctx_out += 0x10;
} }
@ -859,14 +899,14 @@ void se_save_context(unsigned int srkgen_keyslot, unsigned int rng_keyslot, void
/* Save "Known Pattern. " */ /* Save "Known Pattern. " */
static const uint8_t context_save_known_pattern[0x10] = {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}; static const uint8_t context_save_known_pattern[0x10] = {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f};
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_MEM); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_MEM);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(dst + 0x830, 0x10, context_save_known_pattern, 0x10); se_encrypt_with_srk(dst + 0x830, 0x10, context_save_known_pattern, 0x10);
/* Save SRK into PMC registers. */ /* Save SRK into PMC registers. */
SECURITY_ENGINE->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_SRK); se->CONTEXT_SAVE_CONFIG_REG = (CTX_SAVE_SRC_SRK);
SECURITY_ENGINE->BLOCK_COUNT_REG = 0; se->BLOCK_COUNT_REG = 0;
se_encrypt_with_srk(work_buf, 0, NULL, 0); se_encrypt_with_srk(work_buf, 0, NULL, 0);
SECURITY_ENGINE->CONFIG_REG = 0; se->CONFIG_REG = 0;
se_encrypt_with_srk(work_buf, 0, NULL, 0); se_encrypt_with_srk(work_buf, 0, NULL, 0);
} }


@ -97,7 +97,7 @@
#define RSA_2048_BYTES 0x100 #define RSA_2048_BYTES 0x100
typedef struct security_engine { typedef struct {
uint32_t _0x0; uint32_t _0x0;
uint32_t _0x4; uint32_t _0x4;
uint32_t OPERATION_REG; uint32_t OPERATION_REG;
@ -157,15 +157,13 @@ typedef struct security_engine {
uint32_t FLAGS_REG; uint32_t FLAGS_REG;
uint32_t ERR_STATUS_REG; uint32_t ERR_STATUS_REG;
uint32_t _0x808; uint32_t _0x808;
uint32_t _0x80C; uint32_t SPARE_0;
uint32_t _0x810; uint32_t _0x810;
uint32_t _0x814; uint32_t _0x814;
uint32_t _0x818; uint32_t _0x818;
uint32_t _0x81C; uint32_t _0x81C;
uint8_t _0x820[0x17E0]; uint8_t _0x820[0x17E0];
} security_engine_t; } tegra_se_t;
static_assert(sizeof(security_engine_t) == 0x2000, "Mis-defined Security Engine Registers!");
typedef struct { typedef struct {
uint32_t address; uint32_t address;
@ -177,15 +175,10 @@ typedef struct {
se_addr_info_t addr_info; /* This should really be an array...but for our use case it works. */ se_addr_info_t addr_info; /* This should really be an array...but for our use case it works. */
} se_ll_t; } se_ll_t;
static inline volatile tegra_se_t *se_get_regs(void) {
/* WIP, API subject to change. */ return (volatile tegra_se_t *)(MMIO_GET_DEVICE_ADDRESS(MMIO_DEVID_SE));
static inline volatile security_engine_t *get_security_engine(void) {
return (volatile security_engine_t *)(MMIO_GET_DEVICE_ADDRESS(MMIO_DEVID_SE));
} }
#define SECURITY_ENGINE (get_security_engine())
/* This function MUST be registered to fire on the appropriate interrupt. */ /* This function MUST be registered to fire on the appropriate interrupt. */
void se_operation_completed(void); void se_operation_completed(void);
@ -209,7 +202,6 @@ void set_rsa_keyslot(unsigned int keyslot, const void *modulus, size_t modulus_s
void set_aes_keyslot_iv(unsigned int keyslot, const void *iv, size_t iv_size); void set_aes_keyslot_iv(unsigned int keyslot, const void *iv, size_t iv_size);
void set_se_ctr(const void *ctr); void set_se_ctr(const void *ctr);
/* Insecure AES API */ /* Insecure AES API */
void se_aes_ctr_crypt_insecure(unsigned int keyslot, uint32_t out_ll_paddr, uint32_t in_ll_paddr, size_t size, const void *ctr, unsigned int (*callback)(void)); void se_aes_ctr_crypt_insecure(unsigned int keyslot, uint32_t out_ll_paddr, uint32_t in_ll_paddr, size_t size, const void *ctr, unsigned int (*callback)(void));
void se_aes_cbc_encrypt_insecure(unsigned int keyslot, uint32_t out_ll_paddr, uint32_t in_ll_paddr, size_t size, const void *iv, unsigned int (*callback)(void)); void se_aes_cbc_encrypt_insecure(unsigned int keyslot, uint32_t out_ll_paddr, uint32_t in_ll_paddr, size_t size, const void *iv, unsigned int (*callback)(void));


@ -175,8 +175,7 @@ bool i2c_write(volatile tegra_i2c_t *regs, uint8_t device, void *src, size_t src
i2c_load_config(regs); i2c_load_config(regs);
/* Config |= SEND; */ /* Config |= SEND; */
regs->I2C_I2C_CNFG_0 |= 0x200; regs->I2C_I2C_CNFG_0 = ((regs->I2C_I2C_CNFG_0 & 0xFFFFFDFF) | 0x200);
while (regs->I2C_I2C_STATUS_0 & 0x100) { while (regs->I2C_I2C_STATUS_0 & 0x100) {
/* Wait until not busy. */ /* Wait until not busy. */
@ -203,8 +202,7 @@ bool i2c_read(volatile tegra_i2c_t *regs, uint8_t device, void *dst, size_t dst_
i2c_load_config(regs); i2c_load_config(regs);
/* Config |= SEND; */ /* Config |= SEND; */
regs->I2C_I2C_CNFG_0 |= 0x200; regs->I2C_I2C_CNFG_0 = ((regs->I2C_I2C_CNFG_0 & 0xFFFFFDFF) | 0x200);
while (regs->I2C_I2C_STATUS_0 & 0x100) { while (regs->I2C_I2C_STATUS_0 & 0x100) {
/* Wait until not busy. */ /* Wait until not busy. */
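Both the old and new expressions end up setting bit 9 of I2C_CNFG (the SEND bit named in the comment); the rewrite just spells the read-modify-write out explicitly by masking the bit off before OR-ing it back in. A quick standalone check that the two forms agree over a range of starting register values:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
        for (uint32_t cnfg = 0; cnfg < 0x10000; cnfg++) {
            uint32_t or_form  = cnfg | 0x200;
            uint32_t rmw_form = (cnfg & 0xFFFFFDFF) | 0x200; /* 0xFFFFFDFF == ~0x200 */
            assert(or_form == rmw_form);
        }
        puts("bit 9 (SEND) ends up set either way");
        return 0;
    }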


@ -21,8 +21,8 @@
#include <stdint.h> #include <stdint.h>
#include <string.h> #include <string.h>
#define I2C234_BASE 0x7000C000 #define I2C1234_BASE 0x7000C000
#define I2C56_BASE 0x7000D000 #define I2C56_BASE 0x7000D000
#define I2C_1 0 #define I2C_1 0
#define I2C_2 1 #define I2C_2 1
@ -82,10 +82,10 @@ typedef struct {
uint32_t I2C_I2C_HS_INTERFACE_TIMING_1_0; uint32_t I2C_I2C_HS_INTERFACE_TIMING_1_0;
} tegra_i2c_t; } tegra_i2c_t;
#define I2C1_REGS ((volatile tegra_i2c_t *)(I2C234_BASE + 0x000)) #define I2C1_REGS ((volatile tegra_i2c_t *)(I2C1234_BASE + 0x000))
#define I2C2_REGS ((volatile tegra_i2c_t *)(I2C234_BASE + 0x400)) #define I2C2_REGS ((volatile tegra_i2c_t *)(I2C1234_BASE + 0x400))
#define I2C3_REGS ((volatile tegra_i2c_t *)(I2C234_BASE + 0x500)) #define I2C3_REGS ((volatile tegra_i2c_t *)(I2C1234_BASE + 0x500))
#define I2C4_REGS ((volatile tegra_i2c_t *)(I2C234_BASE + 0x700)) #define I2C4_REGS ((volatile tegra_i2c_t *)(I2C1234_BASE + 0x700))
#define I2C5_REGS ((volatile tegra_i2c_t *)(I2C56_BASE + 0x000)) #define I2C5_REGS ((volatile tegra_i2c_t *)(I2C56_BASE + 0x000))
#define I2C6_REGS ((volatile tegra_i2c_t *)(I2C56_BASE + 0x100)) #define I2C6_REGS ((volatile tegra_i2c_t *)(I2C56_BASE + 0x100))


@ -39,8 +39,7 @@ void NOINLINE ll_init(volatile se_ll_t *ll, void *buffer, size_t size) {
} }
void se_check_error_status_reg(void) { void se_check_error_status_reg(void) {
volatile tegra_se_t *se = se_get_regs(); if (se_get_regs()->ERR_STATUS_REG) {
if (se->ERR_STATUS_REG) {
generic_panic(); generic_panic();
} }
} }
@ -53,8 +52,7 @@ void se_check_for_error(void) {
} }
void se_verify_flags_cleared(void) { void se_verify_flags_cleared(void) {
volatile tegra_se_t *se = se_get_regs(); if (se_get_regs()->FLAGS_REG & 3) {
if (se->FLAGS_REG & 3) {
generic_panic(); generic_panic();
} }
} }
@ -193,9 +191,8 @@ void clear_aes_keyslot_iv(unsigned int keyslot) {
} }
void set_se_ctr(const void *ctr) { void set_se_ctr(const void *ctr) {
volatile tegra_se_t *se = se_get_regs();
for (unsigned int i = 0; i < 4; i++) { for (unsigned int i = 0; i < 4; i++) {
se->CRYPTO_CTR_REG[i] = read32le(ctr, i * 4); se_get_regs()->CRYPTO_CTR_REG[i] = read32le(ctr, i * 4);
} }
} }
@ -237,7 +234,6 @@ void se_synchronous_exp_mod(unsigned int keyslot, void *dst, size_t dst_size, co
} }
void se_get_exp_mod_output(void *buf, size_t size) { void se_get_exp_mod_output(void *buf, size_t size) {
volatile tegra_se_t *se = se_get_regs();
size_t num_dwords = (size >> 2); size_t num_dwords = (size >> 2);
if (num_dwords < 1) { if (num_dwords < 1) {
@ -249,7 +245,7 @@ void se_get_exp_mod_output(void *buf, size_t size) {
/* Copy endian swapped output. */ /* Copy endian swapped output. */
while (num_dwords) { while (num_dwords) {
*p_out = read32be(se->RSA_OUTPUT, offset); *p_out = read32be(se_get_regs()->RSA_OUTPUT, offset);
offset += 4; offset += 4;
p_out--; p_out--;
num_dwords--; num_dwords--;
@ -330,10 +326,8 @@ void trigger_se_blocking_op(unsigned int op, void *dst, size_t dst_size, const v
se_check_for_error(); se_check_for_error();
} }
/* Secure AES Functionality. */ /* Secure AES Functionality. */
void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src, size_t src_size) { void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src, size_t src_size) {
volatile tegra_se_t *se = se_get_regs();
uint8_t block[0x10] = {0}; uint8_t block[0x10] = {0};
if (src_size > sizeof(block) || dst_size > sizeof(block)) { if (src_size > sizeof(block) || dst_size > sizeof(block)) {
@ -346,7 +340,7 @@ void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src,
} }
/* Trigger AES operation. */ /* Trigger AES operation. */
se->BLOCK_COUNT_REG = 0; se_get_regs()->BLOCK_COUNT_REG = 0;
trigger_se_blocking_op(OP_START, block, sizeof(block), block, sizeof(block)); trigger_se_blocking_op(OP_START, block, sizeof(block), block, sizeof(block));
/* Copy output data into dst. */ /* Copy output data into dst. */
@ -407,7 +401,6 @@ void se_aes_256_ecb_encrypt_block(unsigned int keyslot, void *dst, size_t dst_si
se_aes_ecb_encrypt_block(keyslot, dst, dst_size, src, src_size, 0x202); se_aes_ecb_encrypt_block(keyslot, dst, dst_size, src, src_size, 0x202);
} }
void se_aes_ecb_decrypt_block(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size) { void se_aes_ecb_decrypt_block(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size) {
volatile tegra_se_t *se = se_get_regs(); volatile tegra_se_t *se = se_get_regs();
@ -535,7 +528,6 @@ void se_compute_aes_cmac(unsigned int keyslot, void *cmac, size_t cmac_size, con
se->CRYPTO_REG = (keyslot << 24) | (0x145); se->CRYPTO_REG = (keyslot << 24) | (0x145);
clear_aes_keyslot_iv(keyslot); clear_aes_keyslot_iv(keyslot);
unsigned int num_blocks = (data_size + 0xF) >> 4; unsigned int num_blocks = (data_size + 0xF) >> 4;
/* Handle aligned blocks. */ /* Handle aligned blocks. */
if (num_blocks > 1) { if (num_blocks > 1) {


@ -34,6 +34,9 @@
#define KEYSLOT_SWITCH_4XNEWCONSOLEKEYGENKEY 0xE #define KEYSLOT_SWITCH_4XNEWCONSOLEKEYGENKEY 0xE
#define KEYSLOT_SWITCH_4XOLDDEVICEKEY 0xF #define KEYSLOT_SWITCH_4XOLDDEVICEKEY 0xF
/* This keyslot was added in 5.0.0. */
#define KEYSLOT_SWITCH_5XNEWDEVICEKEYGENKEY 0xA
#define KEYSLOT_AES_MAX 0x10 #define KEYSLOT_AES_MAX 0x10
#define KEYSLOT_RSA_MAX 0x2 #define KEYSLOT_RSA_MAX 0x2
@ -88,7 +91,7 @@
#define RSA_2048_BYTES 0x100 #define RSA_2048_BYTES 0x100
typedef struct security_engine { typedef struct {
uint32_t _0x0; uint32_t _0x0;
uint32_t _0x4; uint32_t _0x4;
uint32_t OPERATION_REG; uint32_t OPERATION_REG;
@ -170,8 +173,6 @@ static inline volatile tegra_se_t *se_get_regs(void) {
return (volatile tegra_se_t *)SE_BASE; return (volatile tegra_se_t *)SE_BASE;
} }
/* This function MUST be registered to fire on the appropriate interrupt. */
void se_check_error_status_reg(void); void se_check_error_status_reg(void);
void se_check_for_error(void); void se_check_for_error(void);
void se_trigger_interrupt(void); void se_trigger_interrupt(void);


@ -6,6 +6,7 @@ PHDRS
{ {
crt0 PT_LOAD; crt0 PT_LOAD;
chainloader PT_LOAD; chainloader PT_LOAD;
nxboot PT_LOAD;
main PT_LOAD; main PT_LOAD;
} }
@ -13,16 +14,17 @@ PHDRS
MEMORY MEMORY
{ {
main : ORIGIN = 0xF0000000, LENGTH = 0x10000000 main : ORIGIN = 0xF0000000, LENGTH = 0x10000000
high_iram : ORIGIN = 0x40010000, LENGTH = 0x20000
low_iram : ORIGIN = 0x40003000, LENGTH = 0x8000 low_iram : ORIGIN = 0x40003000, LENGTH = 0x8000
} }
SECTIONS SECTIONS
{ {
PROVIDE(__start__ = 0xF0000000); PROVIDE(__start__ = 0xF0000000);
PROVIDE(__stack_top__ = 0x40010000); PROVIDE(__stack_top__ = 0x90020000);
PROVIDE(__stack_bottom__ = 0x4000C000); PROVIDE(__stack_bottom__ = 0x90010000);
PROVIDE(__heap_start__ = 0xE0000000); PROVIDE(__heap_start__ = 0x90020000);
PROVIDE(__heap_end__ = 0xF0000000); PROVIDE(__heap_end__ = 0xA0020000);
. = __start__; . = __start__;
@ -54,6 +56,27 @@ SECTIONS
PROVIDE (__chainloader_end__ = ABSOLUTE(.)); PROVIDE (__chainloader_end__ = ABSOLUTE(.));
} >low_iram :NONE } >low_iram :NONE
.nxboot_loadable :
{
. = ALIGN(32);
PROVIDE (__nxboot_start__ = ABSOLUTE(.));
PROVIDE (__nxboot_lma__ = LOADADDR(.nxboot_loadable));
KEEP(*(.nxboot.text.start))
nxboot_iram.o(.text*)
nxboot_iram.o(.rodata*)
nxboot_iram.o(.data*)
. = ALIGN(32);
} >high_iram AT>main :nxboot
.nxboot_bss (NOLOAD) :
{
. = ALIGN(32);
PROVIDE (__nxboot_bss_start__ = ABSOLUTE(.));
nxboot_iram.o(.bss* COMMON)
. = ALIGN(32);
PROVIDE (__nxboot_end__ = ABSOLUTE(.));
} >high_iram :NONE
.text : .text :
{ {
. = ALIGN(32); . = ALIGN(32);


@ -175,8 +175,7 @@ bool i2c_write(volatile tegra_i2c_t *regs, uint8_t device, void *src, size_t src
i2c_load_config(regs); i2c_load_config(regs);
/* Config |= SEND; */ /* Config |= SEND; */
regs->I2C_I2C_CNFG_0 |= 0x200; regs->I2C_I2C_CNFG_0 = ((regs->I2C_I2C_CNFG_0 & 0xFFFFFDFF) | 0x200);
while (regs->I2C_I2C_STATUS_0 & 0x100) { while (regs->I2C_I2C_STATUS_0 & 0x100) {
/* Wait until not busy. */ /* Wait until not busy. */
@ -203,8 +202,7 @@ bool i2c_read(volatile tegra_i2c_t *regs, uint8_t device, void *dst, size_t dst_
i2c_load_config(regs); i2c_load_config(regs);
/* Config |= SEND; */ /* Config |= SEND; */
regs->I2C_I2C_CNFG_0 |= 0x200; regs->I2C_I2C_CNFG_0 = ((regs->I2C_I2C_CNFG_0 & 0xFFFFFDFF) | 0x200);
while (regs->I2C_I2C_STATUS_0 & 0x100) { while (regs->I2C_I2C_STATUS_0 & 0x100) {
/* Wait until not busy. */ /* Wait until not busy. */


@ -21,8 +21,8 @@
#include <stdint.h> #include <stdint.h>
#include <string.h> #include <string.h>
#define I2C234_BASE 0x7000C000 #define I2C1234_BASE 0x7000C000
#define I2C56_BASE 0x7000D000 #define I2C56_BASE 0x7000D000
#define I2C_1 0 #define I2C_1 0
#define I2C_2 1 #define I2C_2 1
@ -82,10 +82,10 @@ typedef struct {
uint32_t I2C_I2C_HS_INTERFACE_TIMING_1_0; uint32_t I2C_I2C_HS_INTERFACE_TIMING_1_0;
} tegra_i2c_t; } tegra_i2c_t;
#define I2C1_REGS ((volatile tegra_i2c_t *)(I2C234_BASE + 0x000)) #define I2C1_REGS ((volatile tegra_i2c_t *)(I2C1234_BASE + 0x000))
#define I2C2_REGS ((volatile tegra_i2c_t *)(I2C234_BASE + 0x400)) #define I2C2_REGS ((volatile tegra_i2c_t *)(I2C1234_BASE + 0x400))
#define I2C3_REGS ((volatile tegra_i2c_t *)(I2C234_BASE + 0x500)) #define I2C3_REGS ((volatile tegra_i2c_t *)(I2C1234_BASE + 0x500))
#define I2C4_REGS ((volatile tegra_i2c_t *)(I2C234_BASE + 0x700)) #define I2C4_REGS ((volatile tegra_i2c_t *)(I2C1234_BASE + 0x700))
#define I2C5_REGS ((volatile tegra_i2c_t *)(I2C56_BASE + 0x000)) #define I2C5_REGS ((volatile tegra_i2c_t *)(I2C56_BASE + 0x000))
#define I2C6_REGS ((volatile tegra_i2c_t *)(I2C56_BASE + 0x100)) #define I2C6_REGS ((volatile tegra_i2c_t *)(I2C56_BASE + 0x100))


@@ -51,8 +51,11 @@ static void __program_init_newlib_hooks(void) {
static void __program_move_additional_sections(void) { static void __program_move_additional_sections(void) {
#if defined(FUSEE_STAGE1_SRC) || defined(FUSEE_STAGE2_SRC) #if defined(FUSEE_STAGE1_SRC) || defined(FUSEE_STAGE2_SRC)
extern uint8_t __chainloader_lma__[], __chainloader_start__[], __chainloader_bss_start__[], __chainloader_end__[]; extern uint8_t __chainloader_lma__[], __chainloader_start__[], __chainloader_bss_start__[], __chainloader_end__[];
extern uint8_t __nxboot_lma__[], __nxboot_start__[], __nxboot_bss_start__[], __nxboot_end__[];
memcpy(__chainloader_start__, __chainloader_lma__, __chainloader_bss_start__ - __chainloader_start__); memcpy(__chainloader_start__, __chainloader_lma__, __chainloader_bss_start__ - __chainloader_start__);
memset(__chainloader_bss_start__, 0, __chainloader_end__ - __chainloader_bss_start__); memset(__chainloader_bss_start__, 0, __chainloader_end__ - __chainloader_bss_start__);
memcpy(__nxboot_start__, __nxboot_lma__, __nxboot_bss_start__ - __nxboot_start__);
memset(__nxboot_bss_start__, 0, __nxboot_end__ - __nxboot_bss_start__);
#endif #endif
} }


@@ -102,7 +102,8 @@ int main(int argc, void **argv) {
g_do_nxboot = loader_ctx->chainload_entrypoint == 0; g_do_nxboot = loader_ctx->chainload_entrypoint == 0;
if (g_do_nxboot) { if (g_do_nxboot) {
printf("Now performing nxboot.\n"); printf("Now performing nxboot.\n");
nxboot_main(); uint32_t boot_memaddr = nxboot_main();
nxboot_finish(boot_memaddr);
} else { } else {
/* TODO: What else do we want to do in terms of argc/argv? */ /* TODO: What else do we want to do in terms of argc/argv? */
const char *path = get_loader_ctx()->file_paths_to_load[get_loader_ctx()->file_id_of_entrypoint]; const char *path = get_loader_ctx()->file_paths_to_load[get_loader_ctx()->file_id_of_entrypoint];


@@ -185,9 +185,8 @@ static void nxboot_move_bootconfig() {
/* This is the main function responsible for booting Horizon. */ /* This is the main function responsible for booting Horizon. */
static nx_keyblob_t __attribute__((aligned(16))) g_keyblobs[32]; static nx_keyblob_t __attribute__((aligned(16))) g_keyblobs[32];
void nxboot_main(void) { uint32_t nxboot_main(void) {
volatile tegra_pmc_t *pmc = pmc_get_regs(); volatile tegra_pmc_t *pmc = pmc_get_regs();
volatile tegra_se_t *se = se_get_regs();
loader_ctx_t *loader_ctx = get_loader_ctx(); loader_ctx_t *loader_ctx = get_loader_ctx();
package2_header_t *package2; package2_header_t *package2;
size_t package2_size; size_t package2_size;
@@ -397,43 +396,6 @@ void nxboot_main(void) {
} }
free(package2); free(package2);
/* Clear used keyslots. */
clear_aes_keyslot(KEYSLOT_SWITCH_PACKAGE2KEY);
clear_aes_keyslot(KEYSLOT_SWITCH_RNGKEY);
/* Lock keyslots. */
set_aes_keyslot_flags(KEYSLOT_SWITCH_MASTERKEY, 0xFF);
if (MAILBOX_EXOSPHERE_CONFIGURATION->target_firmware < EXOSPHERE_TARGET_FIRMWARE_400) {
set_aes_keyslot_flags(KEYSLOT_SWITCH_DEVICEKEY, 0xFF);
} else {
set_aes_keyslot_flags(KEYSLOT_SWITCH_4XOLDDEVICEKEY, 0xFF);
}
/* Finalize the GPU UCODE carveout. */
mc_config_carveout_finalize();
/* Lock AES keyslots. */
for (uint32_t i = 0; i < 16; i++)
set_aes_keyslot_flags(i, 0x15);
/* Lock RSA keyslots. */
for (uint32_t i = 0; i < 2; i++)
set_rsa_keyslot_flags(i, 1);
/* Lock the Security Engine. */
se->_0x4 = 0;
se->AES_KEY_READ_DISABLE_REG = 0;
se->RSA_KEY_READ_DISABLE_REG = 0;
se->_0x0 &= 0xFFFFFFFB;
/* Boot up Exosphère. */
MAILBOX_NX_BOOTLOADER_IS_SECMON_AWAKE = 0;
if (MAILBOX_EXOSPHERE_CONFIGURATION->target_firmware < EXOSPHERE_TARGET_FIRMWARE_400) {
MAILBOX_NX_BOOTLOADER_SETUP_STATE = NX_BOOTLOADER_STATE_LOADED_PACKAGE2;
} else {
MAILBOX_NX_BOOTLOADER_SETUP_STATE = NX_BOOTLOADER_STATE_DRAM_INITIALIZED_4X;
}
printf("[NXBOOT]: Powering on the CCPLEX...\n"); printf("[NXBOOT]: Powering on the CCPLEX...\n");
/* Display splash screen. */ /* Display splash screen. */
@@ -442,26 +404,6 @@ void nxboot_main(void) {
/* Unmount everything. */ /* Unmount everything. */
nxfs_unmount_all(); nxfs_unmount_all();
/* Terminate the display. */ /* Return the memory address for booting CPU0. */
display_end(); return (uint32_t)exosphere_memaddr;
/* Boot CPU0. */
cluster_boot_cpu0((uint32_t)exosphere_memaddr);
/* Wait for Exosphère to wake up. */
while (MAILBOX_NX_BOOTLOADER_IS_SECMON_AWAKE == 0) {
udelay(1);
}
/* Signal Exosphère. */
if (MAILBOX_EXOSPHERE_CONFIGURATION->target_firmware < EXOSPHERE_TARGET_FIRMWARE_400) {
MAILBOX_NX_BOOTLOADER_SETUP_STATE = NX_BOOTLOADER_STATE_FINISHED;
} else {
MAILBOX_NX_BOOTLOADER_SETUP_STATE = NX_BOOTLOADER_STATE_FINISHED_4X;
}
/* Halt ourselves in waitevent state. */
while (1) {
FLOW_CTLR_HALT_COP_EVENTS_0 = 0x50000000;
}
} }


@@ -43,6 +43,7 @@ typedef struct {
uint32_t boot_reason_state; uint32_t boot_reason_state;
} boot_reason_t; } boot_reason_t;
void nxboot_main(void); uint32_t nxboot_main(void);
void nxboot_finish(uint32_t boot_memaddr);
#endif #endif


@@ -0,0 +1,90 @@
/*
* Copyright (c) 2018 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <string.h>
#include "cluster.h"
#include "di.h"
#include "exocfg.h"
#include "flow.h"
#include "mc.h"
#include "nxboot.h"
#include "se.h"
#include "timers.h"
void nxboot_finish(uint32_t boot_memaddr) {
volatile tegra_se_t *se = se_get_regs();
/* Clear used keyslots. */
clear_aes_keyslot(KEYSLOT_SWITCH_PACKAGE2KEY);
clear_aes_keyslot(KEYSLOT_SWITCH_RNGKEY);
/* Lock keyslots. */
set_aes_keyslot_flags(KEYSLOT_SWITCH_MASTERKEY, 0xFF);
if (MAILBOX_EXOSPHERE_CONFIGURATION->target_firmware < EXOSPHERE_TARGET_FIRMWARE_400) {
set_aes_keyslot_flags(KEYSLOT_SWITCH_DEVICEKEY, 0xFF);
} else {
set_aes_keyslot_flags(KEYSLOT_SWITCH_4XOLDDEVICEKEY, 0xFF);
}
/* Finalize the GPU UCODE carveout. */
mc_config_carveout_finalize();
/* Lock AES keyslots. */
for (uint32_t i = 0; i < 16; i++)
set_aes_keyslot_flags(i, 0x15);
/* Lock RSA keyslots. */
for (uint32_t i = 0; i < 2; i++)
set_rsa_keyslot_flags(i, 1);
/* Lock the Security Engine. */
se->_0x4 = 0;
se->AES_KEY_READ_DISABLE_REG = 0;
se->RSA_KEY_READ_DISABLE_REG = 0;
se->_0x0 &= 0xFFFFFFFB;
/* Boot up Exosphère. */
MAILBOX_NX_BOOTLOADER_IS_SECMON_AWAKE = 0;
if (MAILBOX_EXOSPHERE_CONFIGURATION->target_firmware < EXOSPHERE_TARGET_FIRMWARE_400) {
MAILBOX_NX_BOOTLOADER_SETUP_STATE = NX_BOOTLOADER_STATE_LOADED_PACKAGE2;
} else {
MAILBOX_NX_BOOTLOADER_SETUP_STATE = NX_BOOTLOADER_STATE_DRAM_INITIALIZED_4X;
}
/* Terminate the display. */
display_end();
/* Boot CPU0. */
cluster_boot_cpu0(boot_memaddr);
/* Wait for Exosphère to wake up. */
while (MAILBOX_NX_BOOTLOADER_IS_SECMON_AWAKE == 0) {
udelay(1);
}
/* Signal Exosphère. */
if (MAILBOX_EXOSPHERE_CONFIGURATION->target_firmware < EXOSPHERE_TARGET_FIRMWARE_400) {
MAILBOX_NX_BOOTLOADER_SETUP_STATE = NX_BOOTLOADER_STATE_FINISHED;
} else {
MAILBOX_NX_BOOTLOADER_SETUP_STATE = NX_BOOTLOADER_STATE_FINISHED_4X;
}
/* Halt ourselves in waitevent state. */
while (1) {
FLOW_CTLR_HALT_COP_EVENTS_0 = 0x50000000;
}
}
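For readability, the constant written in the final halt loop can be decomposed as below; the bit names follow those used by other Tegra X1 bootloaders and are an assumption here, not definitions from this codebase.

/* Assumed bit names (borrowed from other Tegra X1 bootloaders, not defined in this tree). */
#define HALT_COP_WAIT_EVENT 0x40000000u /* park the BPMP until an event occurs */
#define HALT_COP_JTAG       0x10000000u /* also treat JTAG activity as a wake event */

/* Equivalent to the write in the loop above: */
/* FLOW_CTLR_HALT_COP_EVENTS_0 = HALT_COP_WAIT_EVENT | HALT_COP_JTAG;  i.e. 0x50000000 */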


@@ -39,8 +39,7 @@ void NOINLINE ll_init(volatile se_ll_t *ll, void *buffer, size_t size) {
} }
void se_check_error_status_reg(void) { void se_check_error_status_reg(void) {
volatile tegra_se_t *se = se_get_regs(); if (se_get_regs()->ERR_STATUS_REG) {
if (se->ERR_STATUS_REG) {
generic_panic(); generic_panic();
} }
} }
@@ -53,8 +52,7 @@ void se_check_for_error(void) {
} }
void se_verify_flags_cleared(void) { void se_verify_flags_cleared(void) {
volatile tegra_se_t *se = se_get_regs(); if (se_get_regs()->FLAGS_REG & 3) {
if (se->FLAGS_REG & 3) {
generic_panic(); generic_panic();
} }
} }
@@ -193,9 +191,8 @@ void clear_aes_keyslot_iv(unsigned int keyslot) {
} }
void set_se_ctr(const void *ctr) { void set_se_ctr(const void *ctr) {
volatile tegra_se_t *se = se_get_regs();
for (unsigned int i = 0; i < 4; i++) { for (unsigned int i = 0; i < 4; i++) {
se->CRYPTO_CTR_REG[i] = read32le(ctr, i * 4); se_get_regs()->CRYPTO_CTR_REG[i] = read32le(ctr, i * 4);
} }
} }
@@ -237,7 +234,6 @@ void se_synchronous_exp_mod(unsigned int keyslot, void *dst, size_t dst_size, co
} }
void se_get_exp_mod_output(void *buf, size_t size) { void se_get_exp_mod_output(void *buf, size_t size) {
volatile tegra_se_t *se = se_get_regs();
size_t num_dwords = (size >> 2); size_t num_dwords = (size >> 2);
if (num_dwords < 1) { if (num_dwords < 1) {
@@ -249,7 +245,7 @@ void se_get_exp_mod_output(void *buf, size_t size) {
/* Copy endian swapped output. */ /* Copy endian swapped output. */
while (num_dwords) { while (num_dwords) {
*p_out = read32be(se->RSA_OUTPUT, offset); *p_out = read32be(se_get_regs()->RSA_OUTPUT, offset);
offset += 4; offset += 4;
p_out--; p_out--;
num_dwords--; num_dwords--;
@@ -330,10 +326,8 @@ void trigger_se_blocking_op(unsigned int op, void *dst, size_t dst_size, const v
se_check_for_error(); se_check_for_error();
} }
/* Secure AES Functionality. */ /* Secure AES Functionality. */
void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src, size_t src_size) { void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src, size_t src_size) {
volatile tegra_se_t *se = se_get_regs();
uint8_t block[0x10] = {0}; uint8_t block[0x10] = {0};
if (src_size > sizeof(block) || dst_size > sizeof(block)) { if (src_size > sizeof(block) || dst_size > sizeof(block)) {
@@ -346,7 +340,7 @@ void se_perform_aes_block_operation(void *dst, size_t dst_size, const void *src,
} }
/* Trigger AES operation. */ /* Trigger AES operation. */
se->BLOCK_COUNT_REG = 0; se_get_regs()->BLOCK_COUNT_REG = 0;
trigger_se_blocking_op(OP_START, block, sizeof(block), block, sizeof(block)); trigger_se_blocking_op(OP_START, block, sizeof(block), block, sizeof(block));
/* Copy output data into dst. */ /* Copy output data into dst. */
@@ -407,7 +401,6 @@ void se_aes_256_ecb_encrypt_block(unsigned int keyslot, void *dst, size_t dst_si
se_aes_ecb_encrypt_block(keyslot, dst, dst_size, src, src_size, 0x202); se_aes_ecb_encrypt_block(keyslot, dst, dst_size, src, src_size, 0x202);
} }
void se_aes_ecb_decrypt_block(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size) { void se_aes_ecb_decrypt_block(unsigned int keyslot, void *dst, size_t dst_size, const void *src, size_t src_size) {
volatile tegra_se_t *se = se_get_regs(); volatile tegra_se_t *se = se_get_regs();
@@ -535,7 +528,6 @@ void se_compute_aes_cmac(unsigned int keyslot, void *cmac, size_t cmac_size, con
se->CRYPTO_REG = (keyslot << 24) | (0x145); se->CRYPTO_REG = (keyslot << 24) | (0x145);
clear_aes_keyslot_iv(keyslot); clear_aes_keyslot_iv(keyslot);
unsigned int num_blocks = (data_size + 0xF) >> 4; unsigned int num_blocks = (data_size + 0xF) >> 4;
/* Handle aligned blocks. */ /* Handle aligned blocks. */
if (num_blocks > 1) { if (num_blocks > 1) {


@@ -34,6 +34,9 @@
#define KEYSLOT_SWITCH_4XNEWCONSOLEKEYGENKEY 0xE #define KEYSLOT_SWITCH_4XNEWCONSOLEKEYGENKEY 0xE
#define KEYSLOT_SWITCH_4XOLDDEVICEKEY 0xF #define KEYSLOT_SWITCH_4XOLDDEVICEKEY 0xF
/* This keyslot was added in 5.0.0. */
#define KEYSLOT_SWITCH_5XNEWDEVICEKEYGENKEY 0xA
#define KEYSLOT_AES_MAX 0x10 #define KEYSLOT_AES_MAX 0x10
#define KEYSLOT_RSA_MAX 0x2 #define KEYSLOT_RSA_MAX 0x2
@@ -88,7 +91,7 @@
#define RSA_2048_BYTES 0x100 #define RSA_2048_BYTES 0x100
typedef struct security_engine { typedef struct {
uint32_t _0x0; uint32_t _0x0;
uint32_t _0x4; uint32_t _0x4;
uint32_t OPERATION_REG; uint32_t OPERATION_REG;
@@ -170,8 +173,6 @@ static inline volatile tegra_se_t *se_get_regs(void) {
return (volatile tegra_se_t *)SE_BASE; return (volatile tegra_se_t *)SE_BASE;
} }
/* This function MUST be registered to fire on the appropriate interrupt. */
void se_check_error_status_reg(void); void se_check_error_status_reg(void);
void se_check_for_error(void); void se_check_for_error(void);
void se_trigger_interrupt(void); void se_trigger_interrupt(void);


@@ -31,5 +31,5 @@ void display_splash_screen_bmp(const char *custom_splash_path) {
/* TODO: Display the splash screen. It should be a pointer to a BMP, at this point. */ /* TODO: Display the splash screen. It should be a pointer to a BMP, at this point. */
/* Display the splash screen for three seconds. */ /* Display the splash screen for three seconds. */
/* udelay(3000000); */ udelay(3000000);
} }


@@ -70,3 +70,12 @@ _start:
relocate_and_chainload: relocate_and_chainload:
ldr sp, =__stack_top__ ldr sp, =__stack_top__
b relocate_and_chainload_main b relocate_and_chainload_main
.section .nxboot.text.start, "ax", %progbits
.arm
.align 5
.global nxboot
.type nxboot, %function
nxboot:
ldr sp, =__stack_top__
b nxboot_finish
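Since the trampoline only reloads SP and then tail-branches, a value passed in r0 (the first AAPCS argument register) reaches nxboot_finish() unchanged. Below is a hypothetical C-side view of this entry point; the prototype and attribute are illustrative, and main() in this commit calls nxboot_finish() directly rather than going through the assembly symbol.

#include <stdint.h>

/* Illustrative prototype for the IRAM entry point defined in the assembly above. */
extern void nxboot(uint32_t boot_memaddr) __attribute__((noreturn));

/* Hypothetical hand-off: boot_memaddr rides through in r0, SP moves to __stack_top__,
 * and control never returns (nxboot_finish() ends in a halt loop). */
static void __attribute__((noreturn)) iram_hand_off(uint32_t boot_memaddr) {
    nxboot(boot_memaddr);
    __builtin_unreachable();
}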