/optee_os/core/include/mm/ |
A D | vm.h |
    13  TEE_Result vm_info_init(struct user_mode_ctx *uctx);
    16  void vm_info_final(struct user_mode_ctx *uctx);
    29  TEE_Result vm_map_pad(struct user_mode_ctx *uctx, vaddr_t *va, size_t len,
    39  static inline TEE_Result vm_map(struct user_mode_ctx *uctx, vaddr_t *va,  in vm_map() argument
    43  return vm_map_pad(uctx, va, len, prot, flags, mobj, offs, 0, 0, 0);  in vm_map()
    63  void vm_clean_param(struct user_mode_ctx *uctx);
    65  TEE_Result vm_add_rwmem(struct user_mode_ctx *uctx, struct mobj *mobj,
    76  bool vm_buf_is_inside_um_private(const struct user_mode_ctx *uctx,
    79  bool vm_buf_intersects_um_private(const struct user_mode_ctx *uctx,
    82  TEE_Result vm_buf_to_mboj_offs(const struct user_mode_ctx *uctx,
    [all …]
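
The vm.h hits above outline the user-VA management API: vm_map() at line 39 is just an inline wrapper that forwards to vm_map_pad() with zero padding and default alignment (line 43). The fragment below is a minimal sketch of how a caller maps a memory object into a user-mode context and later unmaps it; the helper name, the TEE_MATTR_URW protection value and the "*va == 0 means pick any free address" behaviour are assumptions, not taken from the listing.

    /*
     * Hypothetical helper, not part of the tree: map `mobj` read/write
     * into the user address space described by `uctx`, then unmap it.
     * Signatures follow the vm.h excerpt above.
     */
    #include <tee_api_types.h>
    #include <mm/vm.h>
    #include <mm/mobj.h>

    static TEE_Result with_scratch_mapping(struct user_mode_ctx *uctx,
                                           struct mobj *mobj, size_t len)
    {
        TEE_Result res = TEE_SUCCESS;
        vaddr_t va = 0;    /* assumed: 0 lets vm_map() pick a free user VA */

        res = vm_map(uctx, &va, len, TEE_MATTR_URW, 0 /* flags */, mobj, 0);
        if (res)
            return res;

        /* ... touch the freshly mapped region here ... */

        return vm_unmap(uctx, va, len);
    }
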
|
A D | sp_mem.h | 77 void *sp_mem_get_va(const struct user_mode_ctx *uctx, size_t offset,
|
/optee_os/core/mm/ |
A D | vm.c |
    299   res = alloc_pgt(uctx);  in vm_map_pad()
    552   rem_um_region(uctx, r);  in vm_remap()
    578   res = alloc_pgt(uctx);  in vm_remap()
    629   if (alloc_pgt(uctx))  in vm_remap()
    834   memset(&uctx->vm_info, 0, sizeof(uctx->vm_info));  in vm_info_init()
    838   res = map_kinit(uctx);  in vm_info_init()
    840   vm_info_final(uctx);  in vm_info_init()
    1014  res = alloc_pgt(uctx);  in vm_map_param()
    1017  vm_clean_param(uctx);  in vm_map_param()
    1046  res = alloc_pgt(uctx);  in vm_add_rwmem()
    [all …]
|
A D | mobj.c |
    493  tee_pager_rem_um_region(&m->utc->uctx, m->va, mobj->size);  in mobj_seccpy_shm_free()
    494  vm_rem_rwmem(&m->utc->uctx, mobj, m->va);  in mobj_seccpy_shm_free()
    540  if (vm_add_rwmem(&utc->uctx, &m->mobj, &va) != TEE_SUCCESS)  in mobj_seccpy_shm_alloc()
    545  if (tee_pager_add_um_region(&utc->uctx, va, m->fobj,  in mobj_seccpy_shm_alloc()
    554  vm_rem_rwmem(&utc->uctx, &m->mobj, va);  in mobj_seccpy_shm_alloc()
|
/optee_os/core/arch/arm/kernel/ |
A D | ldelf_loader.c |
    60   uctx->is_32bit = is_arm32;  in ldelf_load_ldelf()
    81   vm_set_ctx(uctx->ts_ctx);  in ldelf_load_ldelf()
    101  struct user_mode_ctx *uctx)  in ldelf_init_with_ldelf() argument
    113  arg->uuid = uctx->ts_ctx->uuid;  in ldelf_init_with_ldelf()
    121  thread_user_clear_vfp(uctx);  in ldelf_init_with_ldelf()
    242  if (uctx->is_32bit) {  in ldelf_dump_state()
    274  thread_user_clear_vfp(uctx);  in ldelf_dump_state()
    278  uctx->dump_entry_func = 0;  in ldelf_dump_state()
    298  if (!uctx->ftrace_entry_func)  in ldelf_dump_ftrace()
    323  thread_user_clear_vfp(uctx);  in ldelf_dump_ftrace()
    [all …]
|
A D | secure_partition.c |
    113  struct user_mode_ctx *uctx)  in sp_has_exclusive_access() argument
    119  if (uctx) {  in sp_has_exclusive_access()
    176  spc->uctx.ts_ctx = &spc->ts_ctx;  in sp_create_ctx()
    181  res = vm_info_init(&spc->uctx);  in sp_create_ctx()
    229  sp_regs->sp = ctx->uctx.stack_ptr;  in sp_init_set_registers()
    230  sp_regs->pc = ctx->uctx.entry_func;  in sp_init_set_registers()
    292  res = vm_unmap(&ctx->uctx, vaddr, len);  in sp_unmap_ffa_regions()
    325  ldelf_load_ldelf(&ctx->uctx);  in sp_open_session()
    421  thread_user_clear_vfp(&ctx->uctx);  in sp_enter_invoke_cmd()
    458  struct sp_ctx *uctx = to_sp_ctx(ts->ctx);  in sp_handle_svc() local
    [all …]
|
A D | stmm_sp.c |
    92   spc->uctx.ts_ctx = &spc->ta_ctx.ts_ctx;  in stmm_alloc_ctx()
    94   res = vm_info_init(&spc->uctx);  in stmm_alloc_ctx()
    120  thread_user_clear_vfp(&spc->uctx);  in stmm_enter_user_mode()
    177  res = vm_map(&spc->uctx, va, num_pgs * SMALL_PAGE_SIZE,  in alloc_and_map_sp_fobj()
    265  res = vm_set_prot(&spc->uctx, heap_addr, stmm_heap_size,  in load_stmm()
    477  return to_stmm_ctx(ctx)->uctx.vm_info.asid;  in stmm_get_instance_id()
    484  tee_pager_rem_um_regions(&spc->uctx);  in stmm_ctx_destroy()
    485  vm_info_final(&spc->uctx);  in stmm_ctx_destroy()
    545  res = vm_set_prot(&spc->uctx, va, sz, prot);  in sp_svc_set_mem_attr()
    649  res = vm_check_access_rights(&spc->uctx,  in sec_storage_obj_read()
    [all …]
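
stmm_sp.c adjusts the protection of existing mappings with vm_set_prot() (lines 265 and 545), and ldelf_syscalls.c does the same further down in this listing. A common use, sketched below, is to map a region writable while it is being populated and tighten it afterwards; the TEE_MATTR_* permission names are assumed from the core MMU headers, the vm_set_prot() signature matches the call sites above.

    /* Sketch only: tighten permissions once an image has been copied in. */
    static TEE_Result seal_loaded_image(struct user_mode_ctx *uctx,
                                        vaddr_t text_va, size_t text_sz,
                                        vaddr_t heap_va, size_t heap_sz)
    {
        TEE_Result res = TEE_SUCCESS;

        /* Code becomes read + execute, no longer writable */
        res = vm_set_prot(uctx, text_va, text_sz, TEE_MATTR_UR | TEE_MATTR_UX);
        if (res)
            return res;

        /* Heap stays read + write and is never executable */
        return vm_set_prot(uctx, heap_va, heap_sz, TEE_MATTR_URW);
    }
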
|
/optee_os/core/arch/arm/include/mm/ |
A D | tee_pager.h |
    102  TEE_Result tee_pager_add_um_region(struct user_mode_ctx *uctx, vaddr_t base,
    106  tee_pager_add_um_region(struct user_mode_ctx *uctx __unused,  in tee_pager_add_um_region()
    125  bool tee_pager_set_um_region_attr(struct user_mode_ctx *uctx, vaddr_t base,
    129  tee_pager_set_um_region_attr(struct user_mode_ctx *uctx __unused,  in tee_pager_set_um_region_attr()
    138  void tee_pager_rem_um_region(struct user_mode_ctx *uctx, vaddr_t base,
    152  tee_pager_split_um_region(struct user_mode_ctx *uctx __unused,  in tee_pager_split_um_region()
    160  void tee_pager_merge_um_region(struct user_mode_ctx *uctx, vaddr_t va,
    164  tee_pager_merge_um_region(struct user_mode_ctx *uctx __unused,  in tee_pager_merge_um_region()
    177  void tee_pager_rem_um_regions(struct user_mode_ctx *uctx);
    192  void tee_pager_assign_um_tables(struct user_mode_ctx *uctx);
    [all …]
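
tee_pager.h pairs each real prototype (lines 102, 125, 138, 160, ...) with a static inline stub whose parameters are marked __unused, so callers compile unchanged when the pager is configured out. A generic sketch of that declare-or-stub pattern follows; the CFG_PAGED_USER_TA guard, the stub's return value and the trailing prot parameter are assumptions, the listing only shows that both variants exist and that the third argument is an fobj (see the mobj.c call site earlier).

    /* Header-style sketch of the pattern; not the literal tee_pager.h text. */
    #ifdef CFG_PAGED_USER_TA
    TEE_Result tee_pager_add_um_region(struct user_mode_ctx *uctx, vaddr_t base,
                                       struct fobj *fobj, uint32_t prot);
    #else
    static inline TEE_Result
    tee_pager_add_um_region(struct user_mode_ctx *uctx __unused,
                            vaddr_t base __unused, struct fobj *fobj __unused,
                            uint32_t prot __unused)
    {
        /* Paging disabled: report the operation as unsupported */
        return TEE_ERROR_NOT_SUPPORTED;
    }
    #endif
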
|
/optee_os/core/kernel/ |
A D | user_ta.c |
    158  usr_stack = utc->uctx.stack_ptr;  in user_ta_enter()
    168  utc->uctx.entry_func, utc->uctx.is_32bit,  in user_ta_enter()
    172  thread_user_clear_vfp(&utc->uctx);  in user_ta_enter()
    195  vm_clean_param(&utc->uctx);  in user_ta_enter()
    241  if (utc->uctx.dump_entry_func) {  in user_ta_dump_state()
    340  tee_pager_rem_um_regions(&utc->uctx);  in free_utc()
    352  vm_info_final(&utc->uctx);  in free_utc()
    424  utc->uctx.is_initializing = true;  in tee_ta_init_user_ta_session()
    441  res = vm_info_init(&utc->uctx);  in tee_ta_init_user_ta_session()
    462  res = ldelf_load_ldelf(&utc->uctx);  in tee_ta_init_user_ta_session()
    [all …]
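
user_ta.c shows the whole uctx lifecycle in one place: vm_info_init() during session setup (line 441), ldelf_load_ldelf() right after (line 462), and in free_utc() the pager regions are dropped before the VM info itself (lines 340, 352). A reduced sketch of that ordering, with everything except the listed API calls being illustrative:

    #include <mm/vm.h>
    #include <mm/tee_pager.h>
    #include <kernel/ldelf_loader.h>

    /* Teardown mirrors free_utc()/stmm_ctx_destroy() above: pager
     * bookkeeping goes first, then the address space itself. */
    static void uctx_tear_down(struct user_mode_ctx *uctx)
    {
        tee_pager_rem_um_regions(uctx);
        vm_info_final(uctx);
    }

    static TEE_Result uctx_bring_up(struct user_mode_ctx *uctx)
    {
        TEE_Result res = vm_info_init(uctx);   /* fresh user VM info */

        if (res)
            return res;

        res = ldelf_load_ldelf(uctx);          /* map ldelf into the new space */
        if (res)
            uctx_tear_down(uctx);

        return res;
    }
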
|
A D | ldelf_syscalls.c |
    78   res = vm_get_flags(uctx, va, sz, &vm_flags);  in ldelf_syscall_unmap()
    84   return vm_unmap(uctx, va, sz);  in ldelf_syscall_unmap()
    111  res = vm_check_access_rights(uctx,  in ldelf_syscall_open_bin()
    118  res = vm_check_access_rights(uctx,  in ldelf_syscall_open_bin()
    325  vm_set_ctx(uctx->ts_ctx);  in ldelf_syscall_map_bin()
    392  vm_set_ctx(uctx->ts_ctx);  in ldelf_syscall_map_bin()
    406  if (vm_unmap(uctx, *va, num_rounded_bytes))  in ldelf_syscall_map_bin()
    413  vm_set_ctx(uctx->ts_ctx);  in ldelf_syscall_map_bin()
    431  res = vm_check_access_rights(uctx,  in ldelf_syscall_copy_from_bin()
    495  return vm_set_prot(uctx, va, sz, prot);  in ldelf_syscall_set_prot()
    [all …]
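
ldelf_syscall_unmap() queries vm_get_flags() (line 78) before it calls vm_unmap() (line 84), and system_unmap() further down uses the same pair, presumably so that regions the kernel must keep cannot be dropped from the user side. A sketch of that guard, with VM_FLAG_PERMANENT assumed to be the flag that marks such regions and the error code chosen for illustration:

    /* Sketch of the unmap guard; the flag name and error code are assumed. */
    static TEE_Result unmap_user_region(struct user_mode_ctx *uctx, vaddr_t va,
                                        size_t sz)
    {
        uint32_t vm_flags = 0;
        TEE_Result res = vm_get_flags(uctx, va, sz, &vm_flags);

        if (res)
            return res;
        if (vm_flags & VM_FLAG_PERMANENT)
            return TEE_ERROR_ACCESS_DENIED;  /* user code may not drop this */

        return vm_unmap(uctx, va, sz);
    }
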
|
A D | user_mode_ctx.c |
    10  void user_mode_ctx_print_mappings(struct user_mode_ctx *uctx)  in user_mode_ctx_print_mappings() argument
    16  TAILQ_FOREACH(r, &uctx->vm_info.regions, link) {  in user_mode_ctx_print_mappings()
|
/optee_os/core/include/kernel/ |
A D | ldelf_loader.h |
    13  TEE_Result ldelf_load_ldelf(struct user_mode_ctx *uctx);
    15  struct user_mode_ctx *uctx);
    16  TEE_Result ldelf_dump_state(struct user_mode_ctx *uctx);
    17  TEE_Result ldelf_dump_ftrace(struct user_mode_ctx *uctx,
    19  TEE_Result ldelf_dlopen(struct user_mode_ctx *uctx, TEE_UUID *uuid,
    21  TEE_Result ldelf_dlsym(struct user_mode_ctx *uctx, TEE_UUID *uuid,
|
A D | user_mode_ctx.h |
    25  return &to_user_ta_ctx(ctx)->uctx;  in to_user_mode_ctx()
    27  return &to_sp_ctx(ctx)->uctx;  in to_user_mode_ctx()
    29  return &to_stmm_ctx(ctx)->uctx;  in to_user_mode_ctx()
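
user_mode_ctx.h resolves the uctx embedded in whichever context type is active: user TA, secure partition, or StMM. The listing only shows the three return statements (lines 25, 27, 29); the dispatch around them is reconstructed below, and the is_*_ctx() predicates and the struct ts_ctx parameter type are assumptions rather than quoted code.

    /* Hedged reconstruction, not the verbatim header. */
    #include <assert.h>

    static inline struct user_mode_ctx *to_user_mode_ctx(struct ts_ctx *ctx)
    {
        if (is_user_ta_ctx(ctx))
            return &to_user_ta_ctx(ctx)->uctx;
        if (is_sp_ctx(ctx))
            return &to_sp_ctx(ctx)->uctx;

        assert(is_stmm_ctx(ctx));
        return &to_stmm_ctx(ctx)->uctx;
    }
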
|
A D | user_ta.h | 41 struct user_mode_ctx uctx; member
|
/optee_os/core/pta/ |
A D | system.c |
    89   res = vm_check_access_rights(uctx, access_flags,  in system_derive_ta_unique_key()
    103  memcpy(data, &uctx->ts_ctx->uuid, sizeof(TEE_UUID));  in system_derive_ta_unique_key()
    155  res = vm_map_pad(uctx, &va, num_bytes, prot, vm_flags,  in system_map_zi()
    195  res = vm_get_flags(uctx, va, sz, &vm_flags);  in system_unmap()
    201  return vm_unmap(uctx, va, sz);  in system_unmap()
    227  res = ldelf_dlopen(uctx, uuid, flags);  in system_dlopen()
    260  res = ldelf_dlsym(uctx, uuid, sym, maxlen, &va);  in system_dlsym()
    342  return system_map_zi(uctx, param_types, params);  in invoke_command()
    344  return system_unmap(uctx, param_types, params);  in invoke_command()
    346  return system_dlopen(uctx, param_types, params);  in invoke_command()
    [all …]
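
system_map_zi() (line 155) is one of the few callers that uses vm_map_pad() directly rather than the vm_map() wrapper, because it needs the extra padding arguments. The sketch below maps with a reserved gap on both sides of the region; the (pad_begin, pad_end, align) names for the trailing parameters and the TEE_MATTR_URW value are assumed, while SMALL_PAGE_SIZE appears in the stmm_sp.c hits earlier in this listing.

    /* Sketch: reserve one small page of unmapped VA before and after the
     * mapping so stray accesses just outside it fault immediately. */
    static TEE_Result map_with_guards(struct user_mode_ctx *uctx,
                                      struct mobj *mobj, size_t len,
                                      vaddr_t *va)
    {
        const size_t guard = SMALL_PAGE_SIZE;

        return vm_map_pad(uctx, va, len, TEE_MATTR_URW, 0 /* flags */,
                          mobj, 0 /* offs */, guard, guard, 0 /* align */);
    }
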
|
/optee_os/core/arch/arm/mm/ |
A D | tee_pager.c |
    744   if (!uctx->regions) {  in pager_add_um_region()
    745   uctx->regions = malloc(sizeof(*uctx->regions));  in pager_add_um_region()
    746   if (!uctx->regions)  in pager_add_um_region()
    748   TAILQ_INIT(uctx->regions);  in pager_add_um_region()
    751   reg = TAILQ_FIRST(uctx->regions);  in pager_add_um_region()
    824   if (uctx->ts_ctx == tsd->ctx) {  in tee_pager_add_um_region()
    1053  rem_region(uctx->regions, reg);  in tee_pager_rem_um_region()
    1057  tlbi_asid(uctx->vm_info.asid);  in tee_pager_rem_um_region()
    1064  if (!uctx->regions)  in tee_pager_rem_um_regions()
    1075  free(uctx->regions);  in tee_pager_rem_um_regions()
    [all …]
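
pager_add_um_region() allocates the per-context region list lazily (malloc plus TAILQ_INIT on first use, lines 744-748) and tee_pager_rem_um_regions() frees it again (lines 1064, 1075). The standalone C below illustrates the same lazy list-head idiom with <sys/queue.h>; it is an illustration only, not OP-TEE code.

    #include <stdlib.h>
    #include <sys/queue.h>

    struct region {
        TAILQ_ENTRY(region) link;
    };
    TAILQ_HEAD(region_head, region);

    struct region *add_region(struct region_head **regions)
    {
        struct region *reg = calloc(1, sizeof(*reg));

        if (!reg)
            return NULL;

        if (!*regions) {
            /* First region for this context: allocate the head lazily */
            *regions = malloc(sizeof(**regions));
            if (!*regions) {
                free(reg);
                return NULL;
            }
            TAILQ_INIT(*regions);
        }
        TAILQ_INSERT_TAIL(*regions, reg, link);
        return reg;
    }

    void remove_all_regions(struct region_head **regions)
    {
        struct region *reg = NULL;

        if (!*regions)
            return;
        while ((reg = TAILQ_FIRST(*regions))) {
            TAILQ_REMOVE(*regions, reg, link);
            free(reg);
        }
        free(*regions);
        *regions = NULL;
    }

Keeping the head pointer NULL until the first insertion means contexts that never page anything never pay for the allocation.
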
|
A D | core_mmu_private.h | 17 struct user_mode_ctx *uctx);
|
A D | sp_mem.c |
    192  void *sp_mem_get_va(const struct user_mode_ctx *uctx, size_t offset,  in sp_mem_get_va() argument
    197  TAILQ_FOREACH(region, &uctx->vm_info.regions, link) {  in sp_mem_get_va()
|
/optee_os/core/arch/arm/tee/ |
A D | svc_cache.c |
    27  if (vm_buf_intersects_um_private(&utc->uctx, va, len))  in syscall_cache_operation()
    30  res = vm_check_access_rights(&utc->uctx,  in syscall_cache_operation()
|
A D | arch_svc.c |
    365  if (vm_check_access_rights(&utc->uctx,  in save_panic_stack()
    430  if (vm_check_access_rights(&utc->uctx,  in save_panic_stack()
    434  utc->uctx.is_32bit ?  in save_panic_stack()
    447  if (utc->uctx.is_32bit)  in save_panic_stack()
|
/optee_os/core/arch/arm/include/kernel/ |
A D | secure_partition.h |
    50  struct user_mode_ctx uctx;  member
    83  struct user_mode_ctx *uctx);
|
A D | stmm_sp.h | 135 struct user_mode_ctx uctx; member
|
/optee_os/core/tee/ |
A D | tee_svc_cryp.c |
    1391  res = vm_check_access_rights(&utc->uctx,  in copy_in_attrs()
    2549  res = vm_check_access_rights(&utc->uctx,  in syscall_cipher_init()
    2614  struct user_mode_ctx *uctx = &to_user_ta_ctx(sess->ctx)->uctx;  in tee_svc_cipher_update_helper() local
    3350  struct user_mode_ctx *uctx = &to_user_ta_ctx(sess->ctx)->uctx;  in syscall_authenc_enc_final() local
    3369  res = vm_check_access_rights(uctx,  in syscall_authenc_enc_final()
    3383  res = vm_check_access_rights(uctx,  in syscall_authenc_enc_final()
    3401  res = vm_check_access_rights(uctx,  in syscall_authenc_enc_final()
    3435  struct user_mode_ctx *uctx = &to_user_ta_ctx(sess->ctx)->uctx;  in syscall_authenc_dec_final() local
    3453  res = vm_check_access_rights(uctx,  in syscall_authenc_dec_final()
    3467  res = vm_check_access_rights(uctx,  in syscall_authenc_dec_final()
    [all …]
|
A D | tee_svc_storage.c |
    195  res = vm_check_access_rights(&utc->uctx, TEE_MEMORY_ACCESS_READ,  in syscall_storage_obj_open()
    336  res = vm_check_access_rights(&utc->uctx, TEE_MEMORY_ACCESS_READ,  in syscall_storage_obj_create()
    353  res = vm_check_access_rights(&utc->uctx, f,  in syscall_storage_obj_create()
    491  res = vm_check_access_rights(&utc->uctx, TEE_MEMORY_ACCESS_READ,  in syscall_storage_obj_rename()
    619  res = vm_check_access_rights(&utc->uctx, TEE_MEMORY_ACCESS_WRITE,  in syscall_storage_next_enum()
    624  res = vm_check_access_rights(&utc->uctx, TEE_MEMORY_ACCESS_WRITE,  in syscall_storage_next_enum()
    706  res = vm_check_access_rights(&utc->uctx, TEE_MEMORY_ACCESS_WRITE,  in syscall_storage_obj_read()
    762  res = vm_check_access_rights(&utc->uctx, TEE_MEMORY_ACCESS_READ,  in syscall_storage_obj_write()
|
A D | tee_svc.c |
    549  if (vm_check_access_rights(&utc->uctx, flags, a, b))  in utee_param_to_param()
    627  res = vm_check_access_rights(&utc->uctx, flags,  in tee_svc_copy_param()
    660  if (vm_buf_is_inside_um_private(&utc->uctx, va, s)) {  in tee_svc_copy_param()
    668  res = vm_buf_to_mboj_offs(&utc->uctx, va, s,  in tee_svc_copy_param()
    931  return vm_check_access_rights(&to_user_ta_ctx(s->ctx)->uctx, flags,  in syscall_check_access_rights()
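
Nearly every syscall in the three files above funnels user pointers through vm_check_access_rights() before touching them, and syscall_check_access_rights() (tee_svc.c line 931) exposes the same check directly to the TA. A sketch of the usual pre-copy validation follows; the (uctx, flags, address, length) argument order is inferred from the call sites, and TEE_MEMORY_ACCESS_ANY_OWNER is the standard GP flag used when buffers shared with the client are acceptable.

    /* Sketch of the pre-copy validation used throughout the syscall layer. */
    static TEE_Result check_user_out_buf(struct user_mode_ctx *uctx,
                                         uaddr_t buf, size_t len)
    {
        uint32_t flags = TEE_MEMORY_ACCESS_WRITE | TEE_MEMORY_ACCESS_ANY_OWNER;

        /* Fail the syscall up front rather than faulting mid-copy */
        return vm_check_access_rights(uctx, flags, buf, len);
    }
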
|