
Searched refs:pa (Results 1 – 25 of 65) sorted by relevance


/optee_os/core/arch/arm/mm/
core_mmu.c
208 return (pa >= map->pa && end_pa <= map->pa + map->size - 1); in pa_is_in_map()
267 if ((pa >= map->pa) && (pa <= (map->pa + map->size - 1))) in find_map_by_pa()
580 pa = memory_map[n].pa; in add_phys_mem()
586 memory_map[n].pa = MIN(pa, mem->addr); in add_phys_mem()
588 (pa - memory_map[n].pa); in add_phys_mem()
1046 rc = CMP_TRILEAN(mm_a->pa, mm_b->pa); in cmp_init_mem_map()
1357 *pa = map->pa + (vaddr_t)va - map->va; in core_va2pa_helper()
1359 *pa = 0; in core_va2pa_helper()
1369 return (void *)(vaddr_t)(map->va + pa - map->pa); in map_pa2va()
1549 pa = region->pa; in set_region()
[all …]
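
The core_mmu.c hits above all lean on the same idea: each memory map entry records a matching (pa, va, size) triple, so translation in either direction is the constant offset between the two bases once the right entry is found, and membership is a simple closed-range check. A minimal standalone sketch of that pattern, using a simplified stand-in struct rather than the real memory map entry type:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t paddr_t;
    typedef uintptr_t vaddr_t;

    struct map_entry {
            paddr_t pa;     /* physical base of the region */
            vaddr_t va;     /* virtual base of the region */
            size_t size;    /* region size in bytes */
    };

    /* Mirrors pa_is_in_map(): closed-range check against the entry */
    static bool pa_in_entry(const struct map_entry *m, paddr_t pa)
    {
            return pa >= m->pa && pa <= m->pa + m->size - 1;
    }

    /* Mirrors core_va2pa_helper(): va -> pa by the fixed base offset */
    static paddr_t entry_va2pa(const struct map_entry *m, vaddr_t va)
    {
            return m->pa + (va - m->va);
    }

    /* Mirrors map_pa2va(): the inverse translation */
    static void *entry_pa2va(const struct map_entry *m, paddr_t pa)
    {
            return (void *)(m->va + (pa - m->pa));
    }
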
core_mmu_private.h
26 static inline bool core_mmu_check_max_pa(paddr_t pa __maybe_unused) in core_mmu_check_max_pa()
29 return pa <= (BIT64(CFG_CORE_ARM64_PA_BITS) - 1); in core_mmu_check_max_pa()
31 return pa <= (BIT64(40) - 1); in core_mmu_check_max_pa()
38 static inline bool core_mmu_check_end_pa(paddr_t pa, size_t len) in core_mmu_check_end_pa() argument
42 if (ADD_OVERFLOW(pa, len - 1, &end_pa)) in core_mmu_check_end_pa()
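
core_mmu_check_end_pa() above is a small but easy-to-get-wrong pattern: the end address pa + len - 1 is computed with an explicit overflow check before being compared against the platform's physical address width. A standalone sketch, using the GCC/Clang builtin in place of OP-TEE's ADD_OVERFLOW() and hard-coding the 40-bit fallback limit shown above:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uint64_t paddr_t;

    static bool check_end_pa(paddr_t pa, size_t len)
    {
            paddr_t end_pa = 0;

            /* Reject ranges whose last byte address wraps around */
            if (__builtin_add_overflow(pa, len - 1, &end_pa))
                    return false;

            /* The last byte must still be addressable with 40 PA bits */
            return end_pa <= (UINT64_C(1) << 40) - 1;
    }
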
core_mmu_v7.c
248 if (pa & ~TTB_L1_MASK) in core_mmu_get_main_ttb_pa()
250 return pa; in core_mmu_get_main_ttb_pa()
262 if (pa & ~TTB_UL1_MASK) in core_mmu_get_ul1_ttb_pa()
264 return pa; in core_mmu_get_ul1_ttb_pa()
536 tbl[idx] = desc | pa; in core_mmu_set_entry_primitive()
575 paddr_t pa; in core_mmu_entry_to_finer_grained() local
628 if (pa) in core_mmu_get_entry_primitive()
712 paddr_t pa = 0; in map_memarea_sections() local
722 pa = mm->pa; in map_memarea_sections()
728 ttb[idx] = pa | attr; in map_memarea_sections()
[all …]
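
In the ARMv7 short-descriptor format used by core_mmu_v7.c, a first-level entry maps a 1 MiB section, so map_memarea_sections() can simply write the section-aligned PA OR'ed with the attribute bits into the table slot for each covered virtual address. A compact illustration of that write; the shift and mask names below are illustrative, not the kernel's actual macros:

    #include <stdint.h>

    #define SECTION_SHIFT   20                              /* 1 MiB per first-level entry */
    #define SECTION_MASK    ((UINT32_C(1) << SECTION_SHIFT) - 1)

    static void map_section(uint32_t *ttb, uint32_t va, uint32_t pa, uint32_t attr)
    {
            uint32_t idx = va >> SECTION_SHIFT;             /* first-level table index */

            ttb[idx] = (pa & ~SECTION_MASK) | attr;         /* descriptor = PA | attributes */
    }
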
tee_pager.c
436 pa, attr); in tblidx_get_entry()
443 pa, attr); in tblidx_set_entry()
706 paddr_t pa; in get_pmem_pa() local
712 return pa; in get_pmem_pa()
786 paddr_t pa = 0; in map_pgts() local
804 assert(pa); in map_pgts()
1219 paddr_t pa = 0; in tee_pager_unhide_page() local
1474 paddr_t pa) in make_dirty_page() argument
1508 paddr_t pa = 0; in make_iv_available() local
1662 paddr_t pa = 0; in pager_update_permissions() local
[all …]
sp_mem.c
73 paddr_t pa, unsigned int num_pages) in sp_mem_add_pages() argument
82 if (!core_pbuf_is(CORE_MEM_NON_SEC, pa, num_pages * SMALL_PAGE_SIZE)) in sp_mem_add_pages()
86 ms->pages[n + *idx] = pa + n * SMALL_PAGE_SIZE; in sp_mem_add_pages()
108 size_t granule, paddr_t *pa) in get_pa() argument
113 if (!pa) in get_pa()
130 *pa = p; in get_pa()
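
sp_mem_add_pages() shows the usual way a list of small pages is recorded for a secure partition: the whole range is first checked to be non-secure memory, then each 4 KiB page gets its own physical address appended. A trimmed standalone sketch of that loop; the struct, the bounds check, and the is_nonsecure() helper are stand-ins for the real sp_mem types and core_pbuf_is():

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uint64_t paddr_t;

    #define SMALL_PAGE_SIZE 0x1000

    struct page_list {
            size_t max_pages;
            paddr_t pages[64];
    };

    static bool is_nonsecure(paddr_t pa, uint64_t size)
    {
            /* Stand-in: the real code checks with core_pbuf_is(CORE_MEM_NON_SEC, ...) */
            (void)pa;
            (void)size;
            return true;
    }

    static bool add_pages(struct page_list *pl, size_t *idx,
                          paddr_t pa, size_t num_pages)
    {
            size_t n = 0;

            if (*idx + num_pages > pl->max_pages)
                    return false;
            if (!is_nonsecure(pa, (uint64_t)num_pages * SMALL_PAGE_SIZE))
                    return false;

            for (n = 0; n < num_pages; n++)
                    pl->pages[n + *idx] = pa + n * SMALL_PAGE_SIZE;

            *idx += num_pages;
            return true;
    }
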
core_mmu_lpae.c
527 paddr_t pa = 0; in core_mmu_xlat_table_entry_pa2va() local
534 pa = entry & OUTPUT_ADDRESS_MASK; in core_mmu_xlat_table_entry_pa2va()
1024 paddr_t pa; in core_mmu_entry_to_finer_grained() local
1050 pa = *entry & OUTPUT_ADDRESS_MASK; in core_mmu_entry_to_finer_grained()
1053 new_table[i] = pa | attr | BLOCK_DESC; in core_mmu_entry_to_finer_grained()
1054 pa += block_size_on_next_lvl; in core_mmu_entry_to_finer_grained()
1066 paddr_t pa, uint32_t attr) in core_mmu_set_entry_primitive() argument
1071 tbl[idx] = desc | pa; in core_mmu_set_entry_primitive()
1075 size_t idx, paddr_t *pa, uint32_t *attr) in core_mmu_get_entry_primitive() argument
1079 if (pa) in core_mmu_get_entry_primitive()
[all …]
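
core_mmu_entry_to_finer_grained() demonstrates how an LPAE block mapping is split: the block's output address and attributes are copied into every entry of a freshly allocated next-level table, with the address stepped by the next level's block size so the new table covers exactly the same physical range. A simplified standalone sketch; the mask value and descriptor bit below are illustrative, not the real OUTPUT_ADDRESS_MASK definition:

    #include <stddef.h>
    #include <stdint.h>

    #define OUT_ADDR_MASK   UINT64_C(0x0000fffffffff000)    /* output address bits, illustrative */
    #define BLOCK_DESC      UINT64_C(0x1)

    static void split_block(uint64_t block_entry, uint64_t *new_table,
                            size_t num_entries, uint64_t next_lvl_block_size)
    {
            uint64_t pa = block_entry & OUT_ADDR_MASK;      /* physical start of the old block */
            uint64_t attr = block_entry & ~(OUT_ADDR_MASK | UINT64_C(0x3)); /* keep attribute bits only */
            size_t i = 0;

            for (i = 0; i < num_entries; i++) {
                    new_table[i] = pa | attr | BLOCK_DESC;  /* same attributes, smaller pieces */
                    pa += next_lvl_block_size;
            }
    }
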
/optee_os/core/arch/arm/plat-stm32mp1/
main.c
77 paddr_t pa; in console_init() member
80 [0] = { .pa = 0 }, in console_init()
81 [1] = { .pa = USART1_BASE, .secure = true, }, in console_init()
82 [2] = { .pa = USART2_BASE, .secure = false, }, in console_init()
83 [3] = { .pa = USART3_BASE, .secure = false, }, in console_init()
84 [4] = { .pa = UART4_BASE, .secure = false, }, in console_init()
85 [5] = { .pa = UART5_BASE, .secure = false, }, in console_init()
86 [6] = { .pa = USART6_BASE, .secure = false, }, in console_init()
87 [7] = { .pa = UART7_BASE, .secure = false, }, in console_init()
88 [8] = { .pa = UART8_BASE, .secure = false, }, in console_init()
[all …]
/optee_os/core/mm/
mobj.c
40 paddr_t pa; member
61 if (!pa) in mobj_phys_get_pa()
64 p = moph->pa + offs; in mobj_phys_get_pa()
73 *pa = p; in mobj_phys_get_pa()
152 if (pa) { in mobj_phys_init()
159 pa = map->pa; in mobj_phys_init()
176 moph->pa = pa; in mobj_phys_init()
347 paddr_t pa; member
382 *pa = p; in mobj_shm_get_pa()
443 m->pa = pa; in mobj_shm_alloc()
[all …]
/optee_os/core/arch/arm/tee/
cache.c
21 paddr_t pa; in cache_operation() local
23 pa = virt_to_phys(va); in cache_operation()
24 if (!pa) in cache_operation()
34 res = cache_op_outer(DCACHE_AREA_CLEAN_INV, pa, len); in cache_operation()
45 return cache_op_outer(DCACHE_AREA_CLEAN, pa, len); in cache_operation()
49 res = cache_op_outer(DCACHE_AREA_INVALIDATE, pa, len); in cache_operation()
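
cache_operation() highlights why so much of this code carries a PA around: the outer cache (e.g. a PL310) is operated on by physical address, so the caller's VA is translated first and the operation is refused if the buffer turns out not to be mapped. A compressed restatement of the clean-by-range path; virt_to_phys(), cache_op_outer() and DCACHE_AREA_CLEAN are the real names seen in this listing, but the stubs and the exact error code below only exist to keep the sketch self-contained:

    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t paddr_t;
    typedef uint32_t TEE_Result;
    #define TEE_SUCCESS                     0x00000000
    #define TEE_ERROR_ACCESS_DENIED         0xFFFF0001

    enum cache_op { DCACHE_AREA_CLEAN };

    static paddr_t virt_to_phys(void *va)
    {
            return (paddr_t)va;     /* stub: the real helper walks the translation tables */
    }

    static TEE_Result cache_op_outer(enum cache_op op, paddr_t pa, size_t len)
    {
            (void)op; (void)pa; (void)len;
            return TEE_SUCCESS;     /* stub: the real one programs the outer cache controller */
    }

    static TEE_Result clean_outer_by_va(void *va, size_t len)
    {
            paddr_t pa = virt_to_phys(va);

            if (!pa)                /* VA not mapped: nothing sane to hand to the L2 cache */
                    return TEE_ERROR_ACCESS_DENIED;

            return cache_op_outer(DCACHE_AREA_CLEAN, pa, len);
    }
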
entry_fast.c
33 paddr_t pa = 0; in tee_entry_fastcall_l2cc_mutex() local
37 ret = tee_get_l2cc_mutex(&pa); in tee_entry_fastcall_l2cc_mutex()
38 reg_pair_from_64(pa, &args->a2, &args->a3); in tee_entry_fastcall_l2cc_mutex()
41 pa = reg_pair_to_64(args->a2, args->a3); in tee_entry_fastcall_l2cc_mutex()
42 ret = tee_set_l2cc_mutex(&pa); in tee_entry_fastcall_l2cc_mutex()
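
tee_entry_fastcall_l2cc_mutex() has to squeeze a 64-bit physical address into the two 32-bit SMC argument registers a2/a3, which is what the reg_pair_from_64()/reg_pair_to_64() pair is for. A standalone sketch of the split and rejoin; the high-word-first ordering should be read as an assumption about the real helpers:

    #include <stdint.h>

    /* Split a 64-bit value over two 32-bit registers, high word first */
    static void pair_from_64(uint64_t val, uint32_t *reg0, uint32_t *reg1)
    {
            *reg0 = val >> 32;
            *reg1 = (uint32_t)val;
    }

    /* Rebuild the 64-bit value on the receiving side */
    static uint64_t pair_to_64(uint32_t reg0, uint32_t reg1)
    {
            return ((uint64_t)reg0 << 32) | reg1;
    }
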
/optee_os/core/lib/libtomcrypt/src/misc/
mem_neq.c
39 const unsigned char* pa; in mem_neq() local
45 pa = a; in mem_neq()
49 ret |= *pa ^ *pb; in mem_neq()
50 ++pa; in mem_neq()
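
The mem_neq() hits are libtomcrypt's constant-time buffer comparison: differences are OR-accumulated byte by byte and only the final accumulator is inspected, so the loop's running time does not leak where the first mismatch sits. The same idea in a self-contained form (the real routine adds further hardening around the final reduction):

    #include <stddef.h>

    /* Returns 0 when the buffers are equal, nonzero otherwise,
     * in time that depends only on n */
    static int buf_neq(const void *a, const void *b, size_t n)
    {
            const unsigned char *pa = a;
            const unsigned char *pb = b;
            unsigned char ret = 0;

            while (n--) {
                    ret |= *pa ^ *pb;       /* accumulate, never branch on the data */
                    ++pa;
                    ++pb;
            }

            return ret;
    }
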
/optee_os/lib/libutils/isoc/
qsort.c
78 char *pa, *pb, *pc, *pd, *pl, *pm, *pn; in qsort() local
103 pa = pb = (char *)a + es; in qsort()
110 swap(pa, pb); in qsort()
111 pa += es; in qsort()
138 r = min(pa - (char *)a, pb - pa); in qsort()
142 if ((r = pb - pa) > (int)es) in qsort()
/optee_os/core/include/mm/
core_memprot.h
77 void *phys_to_virt(paddr_t pa, enum teecore_memtypes m, size_t len);
85 void *phys_to_virt_io(paddr_t pa, size_t len);
98 vaddr_t core_mmu_get_va(paddr_t pa, enum teecore_memtypes type, size_t len);
104 paddr_t pa; member
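
These core_memprot.h declarations are the usual entry point for drivers that only know a device's physical base address. A short usage sketch, assuming the <mm/core_memprot.h> include path and a made-up UART_BASE purely for illustration:

    #include <stdbool.h>
    #include <mm/core_memprot.h>

    #define UART_BASE       0x40011000      /* hypothetical device PA, illustration only */
    #define UART_REG_SIZE   0x1000

    static void *uart_regs;

    static bool uart_map_regs(void)
    {
            /* phys_to_virt_io() returns NULL if no I/O mapping covers this PA */
            uart_regs = phys_to_virt_io(UART_BASE, UART_REG_SIZE);

            return uart_regs != NULL;
    }
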
mobj.h
30 paddr_t *pa);
64 size_t granule, paddr_t *pa) in mobj_get_pa() argument
67 return mobj->ops->get_pa(mobj, offs, granule, pa); in mobj_get_pa()
232 struct mobj *mobj_phys_alloc(paddr_t pa, size_t size, uint32_t cattr,
255 paddr_t pa, unsigned int num_pages);
306 struct mobj *mobj_shm_alloc(paddr_t pa, size_t size, uint64_t cookie);
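
mobj_get_pa() is the inverse direction: given a memory object and an offset, it hands back the physical address, with granule 0 meaning the address is wanted exactly rather than rounded down. A small usage sketch, under the assumption that the wrapper returns a TEE_Result, which is consistent with the call sites elsewhere in this listing:

    #include <mm/mobj.h>
    #include <tee_api_types.h>

    /* Hypothetical helper: physical address of the first byte of a mobj */
    static TEE_Result mobj_base_pa(struct mobj *mobj, paddr_t *pa)
    {
            /* offset 0, granule 0 (no rounding requested) */
            return mobj_get_pa(mobj, 0, 0, pa);
    }
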
/optee_os/core/drivers/
imx_i2c.c
56 [0] = { .pa = I2C1_BASE, },
59 [1] = { .pa = I2C2_BASE, },
62 [2] = { .pa = I2C3_BASE, },
65 [3] = { .pa = I2C4_BASE, },
75 .base.pa = CCM_BASE,
91 .base.pa = IOMUXC_BASE,
445 static TEE_Result get_va(paddr_t pa, vaddr_t *va) in get_va() argument
512 if (i2c_bus[n].pa) { in i2c_map_controller()
513 if (get_va(i2c_bus[n].pa, &i2c_bus[n].va)) in i2c_map_controller()
528 if (get_va(i2c_clk.base.pa, &i2c_clk.base.va)) in i2c_init()
[all …]
stm32_uart.c
107 pd->base.pa = base; in stm32_uart_init()
116 stm32mp_register_secure_periph_iomem(pd->base.pa); in register_secure_uart()
126 stm32mp_register_non_secure_periph_iomem(pd->base.pa); in register_non_secure_uart()
153 pd->base.pa = info.reg; in stm32_uart_init_from_dt_node()
167 pd->base.va = (vaddr_t)phys_to_virt(pd->base.pa, in stm32_uart_init_from_dt_node()
imx_rngb.c
69 .base.pa = RNGB_BASE,
122 rngb.base.pa, rngb.size); in map_controller_static()
163 rngb.base.pa = virt_to_phys((void *)rngb.base.va); in map_controller()
/optee_os/core/kernel/
user_mode_ctx.c
17 paddr_t pa = 0; in user_mode_ctx_print_mappings() local
20 mobj_get_pa(r->mobj, r->offset, 0, &pa); in user_mode_ctx_print_mappings()
25 n, PRIxVA_WIDTH, r->va, PRIxPA_WIDTH, pa, r->size, in user_mode_ctx_print_mappings()
/optee_os/core/drivers/crypto/caam/utils/
utils_mem.c
240 paddr_t pa = 0; in caam_mem_get_pa_area() local
273 pa = virt_to_phys((void *)va); in caam_mem_get_pa_area()
274 if (!pa) in caam_mem_get_pa_area()
280 pabufs[nb_pa_area].paddr = pa; in caam_mem_get_pa_area()
284 pa, va); in caam_mem_get_pa_area()
306 if (next_pa != (pa + len_tohandle)) { in caam_mem_get_pa_area()
319 pa = next_pa; in caam_mem_get_pa_area()
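
caam_mem_get_pa_area() deals with the fact that a buffer contiguous in the CAAM driver's virtual view may be scattered physically: it walks the buffer a chunk at a time, translating each chunk, and opens a new (paddr, size) descriptor whenever the next chunk's PA is not the previous PA plus the bytes already covered. A standalone sketch of that splitting loop; to_phys() and struct pa_area are simplified stand-ins for virt_to_phys() and the driver's own descriptor type:

    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t paddr_t;
    typedef uintptr_t vaddr_t;

    struct pa_area {
            paddr_t paddr;
            size_t size;
    };

    static paddr_t to_phys(vaddr_t va)
    {
            return (paddr_t)va;     /* stand-in: identity map instead of virt_to_phys() */
    }

    /* Returns the number of physically contiguous areas, or 0 on error */
    static size_t split_pa_areas(vaddr_t va, size_t len, size_t chunk,
                                 struct pa_area *out, size_t max_out)
    {
            size_t nb = 0;
            size_t done = 0;
            paddr_t pa = to_phys(va);

            if (!pa || !len || !chunk || !max_out)
                    return 0;

            out[nb].paddr = pa;
            out[nb].size = 0;

            while (done < len) {
                    size_t step = (len - done < chunk) ? len - done : chunk;
                    paddr_t next_pa = 0;

                    out[nb].size += step;
                    done += step;
                    if (done >= len)
                            break;

                    next_pa = to_phys(va + done);
                    if (next_pa != pa + step) {
                            /* Physical discontinuity: start a new descriptor */
                            if (++nb == max_out)
                                    return 0;
                            out[nb].paddr = next_pa;
                            out[nb].size = 0;
                    }
                    pa = next_pa;
            }

            return nb + 1;
    }
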
/optee_os/core/arch/arm/include/mm/
core_mmu.h
506 paddr_t pa, uint32_t attr);
518 paddr_t pa, uint32_t attr);
521 paddr_t *pa, uint32_t *attr);
531 paddr_t *pa, uint32_t *attr);
564 struct core_mmu_table_info *tbl_info, paddr_t pa) in core_mmu_get_block_offset() argument
566 return pa & ((1 << tbl_info->shift) - 1); in core_mmu_get_block_offset()
635 enum teecore_memtypes core_mmu_get_type_by_pa(paddr_t pa);
697 TEE_Result cache_op_outer(enum cache_op op, paddr_t pa, size_t len);
700 paddr_t pa __unused, in cache_op_outer()
/optee_os/core/arch/arm/kernel/
thread_optee_smc.c
295 if (core_pbuf_is(CORE_MEM_NSEC_SHM, pa, sz)) in rpc_shm_mobj_alloc()
296 return mobj_shm_alloc(pa, sz, cookie); in rpc_shm_mobj_alloc()
299 !(pa & SMALL_PAGE_MASK) && sz <= SMALL_PAGE_SIZE) in rpc_shm_mobj_alloc()
300 return mobj_mapped_shm_alloc(&pa, 1, 0, cookie); in rpc_shm_mobj_alloc()
314 paddr_t pa; in thread_rpc_alloc_arg() local
324 pa = reg_pair_to_64(rpc_args[0], rpc_args[1]); in thread_rpc_alloc_arg()
328 if (!IS_ALIGNED_WITH_TYPE(pa, struct optee_msg_arg)) in thread_rpc_alloc_arg()
331 mobj = rpc_shm_mobj_alloc(pa, size, co); in thread_rpc_alloc_arg()
363 paddr_t pa = 0; in set_tmem() local
373 if (mobj_get_pa(mobj, tpm->u.memref.offs, 0, &pa)) in set_tmem()
[all …]
virtualization.c
109 map[i].pa = tee_data; in prepare_memory_map()
122 map[entries - 1].pa = ta_ram; in prepare_memory_map()
132 map[i].region_size, map[i].pa, map[i].va, in prepare_memory_map()
158 map->type, map->pa, map->pa + map->size); in virt_init_memory()
159 if (!tee_mm_alloc2(&virt_mapper_pool, map->pa, in virt_init_memory()
tee_l2cc_mutex.c
27 void tee_l2cc_store_mutex_boot_pa(uint32_t pa) in tee_l2cc_store_mutex_boot_pa() argument
29 l2cc_mutex_boot_pa = pa; in tee_l2cc_store_mutex_boot_pa()
/optee_os/core/tee/
entry_std.c
46 const paddr_t pa, in param_mem_from_mobj() argument
54 if (!core_is_buffer_inside(pa, MAX(sz, 1UL), b, mobj->size)) in param_mem_from_mobj()
58 mem->offs = pa - b; in param_mem_from_mobj()
102 paddr_t pa = READ_ONCE(tmem->buf_ptr); in set_tmem_param() local
108 if (!pa) { in set_tmem_param()
119 mem->mobj = msg_param_mobj_from_noncontig(pa, sz, shm_ref, in set_tmem_param()
130 if (param_mem_from_mobj(mem, shm_mobj, pa, sz)) in set_tmem_param()
137 if (param_mem_from_mobj(mem, *mobj, pa, sz)) in set_tmem_param()
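
param_mem_from_mobj() in entry_std.c is the validation step for temporary-memory parameters coming from the normal world: the caller-supplied PA range must lie entirely inside the shared-memory object's physical range before an offset into the mobj is derived from it, and a zero size is bumped to 1 so empty parameters cannot pass a degenerate check. A standalone sketch of that containment test; buffer_inside() stands in for core_is_buffer_inside():

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uint64_t paddr_t;

    /* True when [b, b+bl) lies entirely within [a, a+al), without overflow */
    static bool buffer_inside(paddr_t b, size_t bl, paddr_t a, size_t al)
    {
            return b >= a && bl <= al && b - a <= al - bl;
    }

    static bool param_offs_from_pa(paddr_t mobj_pa, size_t mobj_size,
                                   paddr_t pa, size_t sz, size_t *offs)
    {
            size_t checked_sz = sz ? sz : 1;

            if (!buffer_inside(pa, checked_sz, mobj_pa, mobj_size))
                    return false;

            *offs = pa - mobj_pa;
            return true;
    }
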
/optee_os/core/arch/arm/plat-imx/pm/
pm-imx7.c
110 map.pa = phys_addr[i]; in pm_imx7_iram_tbl_init()
123 map.pa = ROUNDDOWN(IRAM_S_BASE, CORE_MMU_PGDIR_SIZE); in pm_imx7_iram_tbl_init()
124 map.va = (vaddr_t)phys_to_virt(map.pa, MEM_AREA_TEE_COHERENT, in pm_imx7_iram_tbl_init()
132 map.pa = GIC_BASE; in pm_imx7_iram_tbl_init()
