/tf-a-ffa_el3_spmc/drivers/nxp/crypto/caam/src/auth/hash.c
     48  *ctx = &glbl_ctx;  in hash_init()
     71  ctx->active = false;  in hash_update()
     78  ctx->active = false;  in hash_update()
     88  sec_out32(&ctx->sg_tbl[ctx->sg_num].addr_hi,  in hash_update()
     91  sec_out32(&ctx->sg_tbl[ctx->sg_num].addr_hi, 0x0);  in hash_update()
     93  sec_out32(&ctx->sg_tbl[ctx->sg_num].addr_lo, (uintptr_t) data_ptr);  in hash_update()
     95  sec_out32(&ctx->sg_tbl[ctx->sg_num].len_flag,  in hash_update()
     98  ctx->sg_num++;  in hash_update()
    129  final = sec_in32(&ctx->sg_tbl[ctx->sg_num - 1].len_flag) |  in hash_final()
    131  sec_out32(&ctx->sg_tbl[ctx->sg_num - 1].len_flag, final);  in hash_final()
    [all …]
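The matches trace the driver's three-call flow: hash_init() hands out the single global context, hash_update() appends one scatter-gather entry per buffer, and hash_final() ORs the FINAL flag into the last entry before running the job. A minimal caller sketch, using the hash_init/hash_update/hash_final signatures visible in csf_hdr_parser.c further down; the SHA256 algo constant and 32-byte digest length are assumptions:

    /* Hedged sketch: one-shot hash of a buffer through the CAAM driver. */
    static int demo_hash_buffer(uint8_t *buf, size_t len, uint8_t *digest)
    {
        void *ctx;
        int ret;

        ret = hash_init(SHA256, &ctx);      /* binds ctx to the global context */
        if (ret != 0)
            return ret;
        ret = hash_update(SHA256, ctx, buf, len);  /* queues one SG entry */
        if (ret != 0)
            return ret;
        /* Marks the last SG entry FINAL and runs the job descriptor. */
        return hash_final(SHA256, ctx, digest, 32);
    }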
/tf-a-ffa_el3_spmc/drivers/nxp/crypto/caam/src/auth/rsa.c
     41  struct rsa_context ctx __aligned(CACHE_WRITEBACK_GRANULE);  in rsa_public_verif_sec()
     47  memset(&ctx, 0, sizeof(struct rsa_context));  in rsa_public_verif_sec()
     49  ctx.pkin.a = sign;  in rsa_public_verif_sec()
     50  ctx.pkin.a_siz = klen;  in rsa_public_verif_sec()
     51  ctx.pkin.n = rsa_pub_key;  in rsa_public_verif_sec()
     52  ctx.pkin.n_siz = klen;  in rsa_public_verif_sec()
     53  ctx.pkin.e = rsa_pub_key + klen;  in rsa_public_verif_sec()
     54  ctx.pkin.e_siz = klen;  in rsa_public_verif_sec()
     56  cnstr_jobdesc_pkha_rsaexp(jobdesc.desc, &ctx.pkin, to, klen);  in rsa_public_verif_sec()
     61  flush_dcache_range((uintptr_t)&ctx.pkin, sizeof(ctx.pkin));  in rsa_public_verif_sec()
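The assignments at lines 49-54 imply the public key arrives as one blob: klen bytes of modulus n followed by klen bytes of exponent e, with the signature as PKHA operand a. A hedged caller sketch; the exact prototype is not shown in this excerpt, so the parameter order is assumed from the names above:

    /* Hedged sketch: recover the RSA "encoded message" via the CAAM PKHA.
     * key_blob layout: modulus at offset 0, public exponent at offset klen. */
    static int demo_rsa_verify(uint8_t *sig, uint8_t *key_blob,
                               uint32_t klen, uint8_t *em_out)
    {
        /* a = sig, n = key_blob, e = key_blob + klen, result lands in em_out */
        return rsa_public_verif_sec(sig, em_out, key_blob, klen);
    }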
/tf-a-ffa_el3_spmc/lib/xlat_tables_v2/xlat_tables_core.c
      97  assert(ctx->next_table < ctx->tables_num);  in xlat_table_get_empty()
      99  return ctx->tables[ctx->next_table++];  in xlat_table_get_empty()
     707  if (ctx->mmap[ctx->mmap_num - 1].size != 0U)  in mmap_add_region_check()
     774  const mmap_region_t *mm_end = ctx->mmap + ctx->mmap_num;  in mmap_add_region_ctx()
    1002  0U, ctx->base_table, ctx->base_table_entries,  in mmap_add_dynamic_region_ctx()
    1031  ctx->base_table, ctx->base_table_entries,  in mmap_add_dynamic_region_ctx()
    1116  xlat_tables_unmap_region(ctx, mm, 0U, ctx->base_table,  in mmap_remove_dynamic_region_ctx()
    1132  mm = ctx->mmap;  in mmap_remove_dynamic_region_ctx()
    1218  ctx->base_table, ctx->base_table_entries,  in init_xlat_tables_ctx()
    1235  assert(ctx->max_va <= ctx->va_max_address);  in init_xlat_tables_ctx()
    [all …]
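mmap_add_region_ctx() only records the region in the context's mmap array; init_xlat_tables_ctx() later walks that array and allocates page tables through xlat_table_get_empty(). A minimal sketch of the static path, assuming a context declared with REGISTER_XLAT_CONTEXT and an illustrative device address:

    /* Hedged sketch: one static Secure device mapping, then table build.
     * The UART address/size are illustrative only. */
    #include <lib/xlat_tables/xlat_tables_v2.h>

    REGISTER_XLAT_CONTEXT(demo, 8, 64, (1ULL << 32), (1ULL << 32));

    void demo_map_uart(void)
    {
        mmap_region_t uart = MAP_REGION_FLAT(0x09000000, 0x1000,
                                             MT_DEVICE | MT_RW | MT_SECURE);

        mmap_add_region_ctx(&demo_xlat_ctx, &uart);
        init_xlat_tables_ctx(&demo_xlat_ctx);
    }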
/tf-a-ffa_el3_spmc/lib/xlat_tables_v2/xlat_tables_utils.c
    227  ctx->base_table_entries);  in xlat_tables_print()
    242  xlat_tables_print_internal(ctx, 0U, ctx->base_table,  in xlat_tables_print()
    243  ctx->base_table_entries, ctx->base_level);  in xlat_tables_print()
    340  assert(ctx != NULL);  in xlat_get_mem_attributes_internal()
    341  assert(ctx->initialized);  in xlat_get_mem_attributes_internal()
    350  ctx->base_table,  in xlat_get_mem_attributes_internal()
    351  ctx->base_table_entries,  in xlat_get_mem_attributes_internal()
    375  xlat_desc_print(ctx, desc);  in xlat_get_mem_attributes_internal()
    435  assert(ctx != NULL);  in xlat_change_mem_attributes_ctx()
    436  assert(ctx->initialized);  in xlat_change_mem_attributes_ctx()
    [all …]
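Both the query and change paths assert an initialized context before touching the tables. Their prototypes appear under xlat_tables_v2.h further down; a hedged sketch that write-protects one mapped page and reads the attributes back, reusing the demo context from the previous sketch:

    /* Hedged sketch: make a page read-only, then confirm MT_RW is gone. */
    int demo_write_protect(uintptr_t va)
    {
        uint32_t attr;
        int ret;

        ret = xlat_change_mem_attributes_ctx(&demo_xlat_ctx, va, PAGE_SIZE,
                                             MT_MEMORY | MT_RO | MT_SECURE);
        if (ret != 0)
            return ret;
        ret = xlat_get_mem_attributes_ctx(&demo_xlat_ctx, va, &attr);
        return (ret == 0 && (attr & MT_RW) == 0U) ? 0 : -1;
    }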
/tf-a-ffa_el3_spmc/lib/xlat_tables_v2/xlat_tables_private.h
     75  void xlat_tables_print(xlat_ctx_t *ctx);
     80  uint64_t xlat_desc(const xlat_ctx_t *ctx, uint32_t attr,
    100  bool is_mmu_enabled_ctx(const xlat_ctx_t *ctx);
/tf-a-ffa_el3_spmc/lib/el3_runtime/aarch64/context_mgmt.c
     74  assert(ctx != NULL);  in cm_setup_context()
     79  zeromem(ctx, sizeof(*ctx));  in cm_setup_context()
    345  sve_enable(ctx);  in enable_extensions_nonsecure()
    361  sve_enable(ctx);  in enable_extensions_secure()
    374  cpu_context_t *ctx;  in cm_init_context_by_index() local
    386  cpu_context_t *ctx;  in cm_init_my_context() local
    660  cpu_context_t *ctx;  in cm_el1_sysregs_context_save() local
    677  cpu_context_t *ctx;  in cm_el1_sysregs_context_restore() local
    698  cpu_context_t *ctx;  in cm_set_elr_el3() local
    716  cpu_context_t *ctx;  in cm_set_elr_spsr_el3() local
    [all …]
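These are the context-manager entry points a service dispatcher strings together around a world switch: cm_setup_context() zeroes and fills a cpu_context_t from an entry_point_info_t, and cm_prepare_el3_exit() programs the system-register enables for the target world. A hedged sketch of the common idiom (entry-point population elided):

    /* Hedged sketch: route the current core into the normal world. */
    static void demo_enter_normal_world(entry_point_info_t *ep)
    {
        cm_init_my_context(ep);           /* calls cm_setup_context() */
        cm_prepare_el3_exit(NON_SECURE);  /* sysreg enables + SCR_EL3 */
        /* The actual eret happens on the EL3 exit path. */
    }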
/tf-a-ffa_el3_spmc/plat/qti/qtiseclib/src/qtiseclib_cb_interface.c
    132  void *ctx;  in qtiseclib_cb_get_ns_ctx() local
    134  ctx = cm_get_context(NON_SECURE);  in qtiseclib_cb_get_ns_ctx()
    137  read_ctx_reg(get_el3state_ctx(ctx), CTX_SPSR_EL3);  in qtiseclib_cb_get_ns_ctx()
    141  read_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_SPSR_EL1);  in qtiseclib_cb_get_ns_ctx()
    143  read_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_ELR_EL1);  in qtiseclib_cb_get_ns_ctx()
    146  qti_ns_ctx->x0 = read_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0);  in qtiseclib_cb_get_ns_ctx()
    147  qti_ns_ctx->x1 = read_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1);  in qtiseclib_cb_get_ns_ctx()
    148  qti_ns_ctx->x2 = read_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2);  in qtiseclib_cb_get_ns_ctx()
    149  qti_ns_ctx->x3 = read_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3);  in qtiseclib_cb_get_ns_ctx()
    150  qti_ns_ctx->x4 = read_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4);  in qtiseclib_cb_get_ns_ctx()
    [all …]
/tf-a-ffa_el3_spmc/services/std_svc/spmd/spmd_pm.c
     83  assert(ctx != NULL);  in spmd_cpu_on_finish_handler()
     84  assert(ctx->state != SPMC_STATE_ON);  in spmd_cpu_on_finish_handler()
    100  cm_setup_context(&ctx->cpu_ctx, spmc_ep_info);  in spmd_cpu_on_finish_handler()
    103  ctx->state = SPMC_STATE_ON_PENDING;  in spmd_cpu_on_finish_handler()
    105  rc = spmd_spm_core_sync_entry(ctx);  in spmd_cpu_on_finish_handler()
    109  ctx->state = SPMC_STATE_OFF;  in spmd_cpu_on_finish_handler()
    113  ctx->state = SPMC_STATE_ON;  in spmd_cpu_on_finish_handler()
    127  assert(ctx != NULL);  in spmd_cpu_off_handler()
    128  assert(ctx->state != SPMC_STATE_OFF);  in spmd_cpu_off_handler()
    133  rc = spmd_spm_core_sync_entry(ctx);  in spmd_cpu_off_handler()
    [all …]
/tf-a-ffa_el3_spmc/drivers/amlogic/crypto/sha_dma.c
    129  if (ctx->started == 0) {  in asd_compute_sha()
    131  ctx->started = 1;  in asd_compute_sha()
    135  ctx->started = 0;  in asd_compute_sha()
    156  if (ctx->blocksz) {  in asd_sha_update()
    158  memcpy(ctx->block + ctx->blocksz, data, nr);  in asd_sha_update()
    159  ctx->blocksz += nr;  in asd_sha_update()
    165  asd_compute_sha(ctx, ctx->block, SHA256_BLOCKSZ, 0);  in asd_sha_update()
    166  ctx->blocksz = 0;  in asd_sha_update()
    174  memcpy(ctx->block + ctx->blocksz, data, nr);  in asd_sha_update()
    175  ctx->blocksz += nr;  in asd_sha_update()
    [all …]
/tf-a-ffa_el3_spmc/lib/cpus/aarch64/cpuamu.c
     32  struct cpuamu_ctx *ctx = &cpuamu_ctxs[plat_my_core_pos()];  in cpuamu_context_save() local
     38  ctx->mask = cpuamu_read_cpuamcntenset_el0();  in cpuamu_context_save()
     41  cpuamu_write_cpuamcntenclr_el0(ctx->mask);  in cpuamu_context_save()
     46  ctx->cnts[i] = cpuamu_cnt_read(i);  in cpuamu_context_save()
     51  struct cpuamu_ctx *ctx = &cpuamu_ctxs[plat_my_core_pos()];  in cpuamu_context_restore() local
     60  cpuamu_write_cpuamcntenclr_el0(ctx->mask);  in cpuamu_context_restore()
     65  cpuamu_cnt_write(i, ctx->cnts[i]);  in cpuamu_context_restore()
     69  cpuamu_write_cpuamcntenset_el0(ctx->mask);  in cpuamu_context_restore()
/tf-a-ffa_el3_spmc/services/std_svc/spm/spm_mm/spm_mm_setup.c
     29  cpu_context_t *ctx = &(sp_ctx->cpu_ctx);  in spm_el0_sp_setup() local
     45  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_MAIR_EL1,  in spm_el0_sp_setup()
     48  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_TCR_EL1,  in spm_el0_sp_setup()
     51  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_TTBR0_EL1,  in spm_el0_sp_setup()
     55  u_register_t sctlr_el1 = read_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_SCTLR_EL1);  in spm_el0_sp_setup()
     91  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_SCTLR_EL1, sctlr_el1);  in spm_el0_sp_setup()
     99  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_VBAR_EL1,  in spm_el0_sp_setup()
    102  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_CNTKCTL_EL1,  in spm_el0_sp_setup()
    112  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_CPACR_EL1,  in spm_el0_sp_setup()
/tf-a-ffa_el3_spmc/services/std_svc/spm/spm_mm/spm_mm_common.c
     84  uint64_t spm_sp_synchronous_entry(sp_context_t *ctx)  in spm_sp_synchronous_entry() argument
     88  assert(ctx != NULL);  in spm_sp_synchronous_entry()
     91  cm_set_context(&(ctx->cpu_ctx), SECURE);  in spm_sp_synchronous_entry()
    102  rc = spm_secure_partition_enter(&ctx->c_rt_ctx);  in spm_sp_synchronous_entry()
    114  __dead2 void spm_sp_synchronous_exit(sp_context_t *ctx, uint64_t rc)  in spm_sp_synchronous_exit() argument
    121  spm_secure_partition_exit(ctx->c_rt_ctx, rc);  in spm_sp_synchronous_exit()
/tf-a-ffa_el3_spmc/services/std_svc/spm/spm_mm/spm_mm_main.c
     39  sp_context_t *ctx;  in spm_init() local
     43  ctx = &sp_ctx;  in spm_init()
     45  ctx->state = SP_STATE_RESET;  in spm_init()
     47  rc = spm_sp_synchronous_entry(ctx);  in spm_init()
     50  ctx->state = SP_STATE_IDLE;  in spm_init()
     62  sp_context_t *ctx;  in spm_mm_setup() local
     71  ctx = &sp_ctx;  in spm_mm_setup()
     72  cpu_ctx = &(ctx->cpu_ctx);  in spm_mm_setup()
    141  mmap_add_region_ctx(ctx->xlat_ctx_handle,  in spm_mm_setup()
    144  mmap_add_ctx(ctx->xlat_ctx_handle,  in spm_mm_setup()
    [all …]
/tf-a-ffa_el3_spmc/lib/el3_runtime/aarch32/context_mgmt.c
     60  assert(ctx != NULL);  in cm_setup_context()
     65  zeromem(ctx, sizeof(*ctx));  in cm_setup_context()
     67  reg_ctx = get_regs_ctx(ctx);  in cm_setup_context()
    150  cpu_context_t *ctx;  in cm_init_context_by_index() local
    152  cm_setup_context(ctx, ep);  in cm_init_context_by_index()
    162  cpu_context_t *ctx;  in cm_init_my_context() local
    164  cm_setup_context(ctx, ep);  in cm_init_my_context()
    178  cpu_context_t *ctx = cm_get_context(security_state);  in cm_prepare_el3_exit() local
    181  assert(ctx != NULL);  in cm_prepare_el3_exit()
    184  scr = read_ctx_reg(get_regs_ctx(ctx), CTX_SCR);  in cm_prepare_el3_exit()
    [all …]
/tf-a-ffa_el3_spmc/include/drivers/amlogic/crypto/sha_dma.h
     26  static inline void asd_sha_init(struct asd_ctx *ctx, enum ASD_MODE mode)  in asd_sha_init() argument
     28  ctx->started = 0;  in asd_sha_init()
     29  ctx->mode = mode;  in asd_sha_init()
     30  ctx->blocksz = 0;  in asd_sha_init()
     33  void asd_sha_update(struct asd_ctx *ctx, void *data, size_t len);
     34  void asd_sha_finalize(struct asd_ctx *ctx);
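The header shows the whole client contract: init, any number of updates (sha_dma.c above buffers partial blocks in ctx->block), one finalize. A hedged usage sketch; ASD_MODE_SHA256 as the mode enumerator and a digest field inside struct asd_ctx are assumptions not visible in this excerpt:

    /* Hedged sketch: SHA-256 over one message with the SHA DMA API. */
    static void demo_sha256(void *msg, size_t len, uint8_t out[32])
    {
        struct asd_ctx ctx;

        asd_sha_init(&ctx, ASD_MODE_SHA256);  /* assumed enumerator */
        asd_sha_update(&ctx, msg, len);       /* buffers partial blocks */
        asd_sha_finalize(&ctx);               /* pads and flushes the tail */
        memcpy(out, ctx.digest, 32);          /* assumed result field */
    }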
/tf-a-ffa_el3_spmc/services/spd/trusty/trusty.c
    157  ctx->fiq_handler_active = 1;  in trusty_fiq_handler()
    158  (void)memcpy(&ctx->fiq_gpregs, get_gpregs_ctx(handle), sizeof(ctx->fiq_gpregs));  in trusty_fiq_handler()
    164  cm_set_elr_spsr_el3(NON_SECURE, ctx->fiq_handler_pc, (uint32_t)ctx->fiq_handler_cpsr);  in trusty_fiq_handler()
    172  struct trusty_cpu_ctx *ctx;  in trusty_set_fiq_handler() local
    179  ctx = &trusty_cpu_ctx[cpu];  in trusty_set_fiq_handler()
    182  ctx->fiq_handler_sp = stack;  in trusty_set_fiq_handler()
    192  SMC_RET4(handle, ctx->fiq_pc, ctx->fiq_cpsr, sp_el0, ctx->fiq_sp_el1);  in trusty_get_fiq_regs()
    220  (void)memcpy(get_gpregs_ctx(handle), &ctx->fiq_gpregs, sizeof(ctx->fiq_gpregs));  in trusty_fiq_exit()
    221  ctx->fiq_handler_active = 0;  in trusty_fiq_exit()
    223  cm_set_elr_spsr_el3(NON_SECURE, ctx->fiq_pc, (uint32_t)ctx->fiq_cpsr);  in trusty_fiq_exit()
    [all …]
/tf-a-ffa_el3_spmc/services/std_svc/spm/spmc/spmc_setup.c
     43  cpu_context_t *ctx;  in spmc_el0_sp_setup() local
     50  ctx = &sp->ec[sel0_sp_ec_index].cpu_ctx;  in spmc_el0_sp_setup()
     66  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_MAIR_EL1,  in spmc_el0_sp_setup()
     69  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_TCR_EL1,  in spmc_el0_sp_setup()
     72  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_TTBR0_EL1,  in spmc_el0_sp_setup()
     76  u_register_t sctlr_el1 = read_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_SCTLR_EL1);  in spmc_el0_sp_setup()
    112  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_SCTLR_EL1, sctlr_el1);  in spmc_el0_sp_setup()
    120  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_VBAR_EL1,  in spmc_el0_sp_setup()
    123  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_CNTKCTL_EL1,  in spmc_el0_sp_setup()
    133  write_ctx_reg(get_el1_sysregs_ctx(ctx), CTX_CPACR_EL1,  in spmc_el0_sp_setup()
    [all …]
/tf-a-ffa_el3_spmc/lib/extensions/amu/aarch64/amu.c
     49  void amu_enable(bool el2_unused, cpu_context_t *ctx)  in amu_enable() argument
     95  v = read_ctx_reg(get_el3state_ctx(ctx), CTX_CPTR_EL3);  in amu_enable()
     97  write_ctx_reg(get_el3state_ctx(ctx), CTX_CPTR_EL3, v);  in amu_enable()
    254  struct amu_ctx *ctx = &amu_ctxs[plat_my_core_pos()];  in amu_context_save() local
    285  ctx->group0_cnts[i] = amu_group0_cnt_read(i);  in amu_context_save()
    301  ctx->group1_cnts[i] = amu_group1_cnt_read(i);  in amu_context_save()
    314  ctx->group1_voffsets[i] =  in amu_context_save()
    325  struct amu_ctx *ctx = &amu_ctxs[plat_my_core_pos()];  in amu_context_restore() local
    346  amu_group0_cnt_write(i, ctx->group0_cnts[i]);  in amu_context_restore()
    365  amu_group1_cnt_write(i, ctx->group1_cnts[i]);  in amu_context_restore()
    [all …]
/tf-a-ffa_el3_spmc/include/lib/el3_runtime/aarch64/context.h
    399  #define read_ctx_reg(ctx, offset) ((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])  argument
    400  #define write_ctx_reg(ctx, offset, val) (((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \  argument
    473  #define set_aapcs_args0(ctx, x0) do { \  argument
    476  #define set_aapcs_args1(ctx, x0, x1) do { \  argument
    478  set_aapcs_args0(ctx, x0); \
    480  #define set_aapcs_args2(ctx, x0, x1, x2) do { \  argument
    482  set_aapcs_args1(ctx, x0, x1); \
    486  set_aapcs_args2(ctx, x0, x1, x2); \
    490  set_aapcs_args3(ctx, x0, x1, x2, x3); \
    494  set_aapcs_args4(ctx, x0, x1, x2, x3, x4); \
    [all …]
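read_ctx_reg()/write_ctx_reg() index the saved-register array by byte offset, so the CTX_* offset constants are passed straight through. A hedged sketch of the usual SMC-handler idiom built on them:

    /* Hedged sketch: patch the saved x0 of the non-secure caller so it
     * sees retval when the SMC returns. */
    static void demo_set_smc_ret(u_register_t retval)
    {
        cpu_context_t *ctx = cm_get_context(NON_SECURE);

        write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, retval);
        /* Equivalent via the helper (assumed to take the full context):
         * set_aapcs_args0(ctx, retval); */
    }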
/tf-a-ffa_el3_spmc/plat/imx/common/plat_imx8_gic.c
    100  void plat_gic_save(unsigned int proc_num, struct plat_gic_ctx *ctx)  in plat_gic_save() argument
    104  gicv3_rdistif_save(i, &ctx->rdist_ctx[i]);  in plat_gic_save()
    105  gicv3_distif_save(&ctx->dist_ctx);  in plat_gic_save()
    108  void plat_gic_restore(unsigned int proc_num, struct plat_gic_ctx *ctx)  in plat_gic_restore() argument
    111  gicv3_distif_init_restore(&ctx->dist_ctx);  in plat_gic_restore()
    113  gicv3_rdistif_init_restore(i, &ctx->rdist_ctx[i]);  in plat_gic_restore()
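The ordering is deliberate: redistributors are saved before the distributor, and restore brings the distributor back first. A hedged sketch of wrapping the pair around a power-down; demo_enter_retention() is a hypothetical stand-in for the platform's PSCI plumbing:

    /* Hedged sketch: preserve GICv3 state across a power-down. */
    static struct plat_gic_ctx demo_gic_ctx;

    void demo_suspend(unsigned int proc_num)
    {
        plat_gic_save(proc_num, &demo_gic_ctx);
        demo_enter_retention();                  /* hypothetical */
        plat_gic_restore(proc_num, &demo_gic_ctx);
    }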
/tf-a-ffa_el3_spmc/tools/encrypt_fw/src/encrypt.c
     28  EVP_CIPHER_CTX *ctx;  in gcm_encrypt() local
     79  ctx = EVP_CIPHER_CTX_new();  in gcm_encrypt()
     80  if (ctx == NULL) {  in gcm_encrypt()
     86  ret = EVP_EncryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, NULL, NULL);  in gcm_encrypt()
     93  ret = EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv);  in gcm_encrypt()
    100  ret = EVP_EncryptUpdate(ctx, enc_data, &enc_len, data, bytes);  in gcm_encrypt()
    110  ret = EVP_EncryptFinal_ex(ctx, enc_data, &enc_len);  in gcm_encrypt()
    117  ret = EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_GET_TAG, TAG_SIZE, tag);  in gcm_encrypt()
    141  EVP_CIPHER_CTX_free(ctx);  in gcm_encrypt()
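The matches are the canonical OpenSSL EVP sequence for AES-256-GCM: create the context, select the cipher, load key/IV, encrypt, finalize, fetch the tag, free. Condensed into a self-contained sketch (the 12-byte IV and 16-byte tag are GCM defaults; error handling shortened):

    /* Hedged sketch: one-shot AES-256-GCM encryption with OpenSSL EVP. */
    #include <openssl/evp.h>

    int demo_gcm_encrypt(const unsigned char key[32], const unsigned char iv[12],
                         const unsigned char *pt, int pt_len,
                         unsigned char *ct, unsigned char tag[16])
    {
        EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
        int len, ok = 0;

        if (ctx == NULL)
            return -1;
        if (EVP_EncryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, NULL, NULL) == 1 &&
            EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv) == 1 &&
            EVP_EncryptUpdate(ctx, ct, &len, pt, pt_len) == 1 &&
            EVP_EncryptFinal_ex(ctx, ct + len, &len) == 1 &&
            EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_GET_TAG, 16, tag) == 1)
            ok = 1;

        EVP_CIPHER_CTX_free(ctx);  /* always freed, as at line 141 above */
        return ok ? 0 : -1;
    }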
/tf-a-ffa_el3_spmc/drivers/nxp/auth/csf_hdr_parser/csf_hdr_parser.c
     52  void *ctx;  in deploy_rotpk_hash_table() local
     68  ret = hash_init(algo, &ctx);  in deploy_rotpk_hash_table()
     74  ret = hash_update(algo, ctx, (uint8_t *)((uint8_t *)srk_buffer),  in deploy_rotpk_hash_table()
     81  ret = hash_final(algo, ctx, hash, digest_size);  in deploy_rotpk_hash_table()
    109  ret = hash_init(algo, &ctx);  in deploy_rotpk_hash_table()
    121  ret = hash_final(algo, ctx, rotpk_hash_table[i], digest_size);  in deploy_rotpk_hash_table()
    144  void *ctx;  in calc_img_hash() local
    149  ret = hash_init(algo, &ctx);  in calc_img_hash()
    162  ret = hash_update(algo, ctx,  in calc_img_hash()
    170  ret = hash_update(algo, ctx, (uint8_t *)(img_addr), img_size);  in calc_img_hash()
    [all …]
/tf-a-ffa_el3_spmc/include/lib/xlat_tables/xlat_tables_v2.h
    234  void init_xlat_tables_ctx(xlat_ctx_t *ctx);
    241  void xlat_setup_dynamic_ctx(xlat_ctx_t *ctx, unsigned long long pa_max,
    254  void mmap_add_region_ctx(xlat_ctx_t *ctx, const mmap_region_t *mm);
    262  void mmap_add_ctx(xlat_ctx_t *ctx, const mmap_region_t *mm);
    271  void mmap_add_region_alloc_va_ctx(xlat_ctx_t *ctx, mmap_region_t *mm);
    294  int mmap_add_dynamic_region_ctx(xlat_ctx_t *ctx, mmap_region_t *mm);
    309  int mmap_add_dynamic_region_alloc_va_ctx(xlat_ctx_t *ctx, mmap_region_t *mm);
    322  int mmap_remove_dynamic_region_ctx(xlat_ctx_t *ctx,
    368  int xlat_change_mem_attributes_ctx(const xlat_ctx_t *ctx, uintptr_t base_va,
    397  int xlat_get_mem_attributes_ctx(const xlat_ctx_t *ctx, uintptr_t base_va,
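Unlike the void static-mapping calls above them, the dynamic pair returns an error code, so regions can be mapped and unmapped at runtime with recovery on exhaustion. A hedged sketch, assuming the library is built with PLAT_XLAT_TABLES_DYNAMIC=1 and illustrative addresses:

    /* Hedged sketch: map a firmware image temporarily, then drop it. */
    int demo_with_image_mapped(xlat_ctx_t *ctx, unsigned long long pa,
                               uintptr_t va, size_t size)
    {
        mmap_region_t mm = MAP_REGION(pa, va, size,
                                      MT_MEMORY | MT_RO | MT_SECURE);
        int ret = mmap_add_dynamic_region_ctx(ctx, &mm);

        if (ret != 0)
            return ret;                /* e.g. no table space left */
        /* ... read or authenticate the image through va ... */
        return mmap_remove_dynamic_region_ctx(ctx, va, size);
    }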
/tf-a-ffa_el3_spmc/include/lib/el3_runtime/aarch32/context.h
     50  #define read_ctx_reg(ctx, offset) ((ctx)->ctx_regs[offset >> WORD_SHIFT])  argument
     51  #define write_ctx_reg(ctx, offset, val) (((ctx)->ctx_regs[offset >> WORD_SHIFT]) \  argument
/tf-a-ffa_el3_spmc/plat/nvidia/tegra/common/tegra_fiq_glue.c
     42  cpu_context_t *ctx = cm_get_context(NON_SECURE);  in tegra_fiq_interrupt_handler() local
     43  el3_state_t *el3state_ctx = get_el3state_ctx(ctx);  in tegra_fiq_interrupt_handler()
    129  cpu_context_t *ctx = cm_get_context(NON_SECURE);  in tegra_fiq_get_intr_context() local
    130  gp_regs_t *gpregs_ctx = get_gpregs_ctx(ctx);  in tegra_fiq_get_intr_context()
    131  const el1_sysregs_t *el1state_ctx = get_el1_sysregs_ctx(ctx);  in tegra_fiq_get_intr_context()