
Searched refs:va (Results 1 – 25 of 72) sorted by relevance


/xen/xen/common/
vmap.c
25 unsigned long va; in vm_init_type() local
213 vunmap(va); in __vmap()
214 va = NULL; in __vmap()
218 return va; in __vmap()
239 vm_free(va); in vunmap()
247 void *va; in vmalloc_type() local
265 if ( va == NULL ) in vmalloc_type()
269 return va; in vmalloc_type()
302 void vfree(void *va) in vfree() argument
309 if ( !va ) in vfree()
[all …]
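
Note: the vmap.c hits above sketch Xen's vmalloc()/vfree() pair. A minimal consumer sketch, assuming the in-tree <xen/vmap.h> and <xen/errno.h> headers; this only compiles inside the Xen build, not standalone:

    #include <xen/errno.h>
    #include <xen/vmap.h>

    static int example_buffer(void)
    {
        /* Allocate fresh pages and map them contiguously in the VMAP area. */
        void *va = vmalloc(4 * PAGE_SIZE);

        if ( va == NULL )   /* mirrors the failure check at vmap.c line 265 */
            return -ENOMEM;

        /* ... use the mapping ... */

        vfree(va);          /* unmaps and frees the backing pages (line 302) */
        return 0;
    }
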
/xen/xen/include/asm-arm/
lpae.h
190 static inline paddr_t third_table_offset_##gran##K(paddr_t va) \
192 return TABLE_OFFSET((va >> third_shift(gran##K)), gran##K); \
195 static inline paddr_t second_table_offset_##gran##K(paddr_t va) \
264 #define zeroeth_linear_offset(va) ((va) >> ZEROETH_SHIFT) argument
265 #define first_linear_offset(va) ((va) >> FIRST_SHIFT) argument
266 #define second_linear_offset(va) ((va) >> SECOND_SHIFT) argument
267 #define third_linear_offset(va) ((va) >> THIRD_SHIFT) argument
270 #define first_table_offset(va) TABLE_OFFSET(first_linear_offset(va)) argument
271 #define second_table_offset(va) TABLE_OFFSET(second_linear_offset(va)) argument
272 #define third_table_offset(va) TABLE_OFFSET(third_linear_offset(va)) argument
[all …]
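
Note: the lpae.h macros above slice a VA into one 9-bit table index per level. A runnable sketch of that arithmetic; the 4K-granule shifts (30/21/12) and 512-entry tables are illustrative assumptions, not values read from lpae.h:

    #include <stdio.h>
    #include <stdint.h>

    #define FIRST_SHIFT   30
    #define SECOND_SHIFT  21
    #define THIRD_SHIFT   12
    #define TABLE_OFFSET(offs) ((offs) & 0x1FF)   /* 9 index bits per level */

    int main(void)
    {
        uint64_t va = 0x40205123ULL;

        printf("first  %llu\n", (unsigned long long)TABLE_OFFSET(va >> FIRST_SHIFT));
        printf("second %llu\n", (unsigned long long)TABLE_OFFSET(va >> SECOND_SHIFT));
        printf("third  %llu\n", (unsigned long long)TABLE_OFFSET(va >> THIRD_SHIFT));
        return 0;
    }
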
flushtlb.h
35 static inline void flush_xen_tlb_range_va_local(vaddr_t va, in flush_xen_tlb_range_va_local() argument
38 vaddr_t end = va + size; in flush_xen_tlb_range_va_local()
41 while ( va < end ) in flush_xen_tlb_range_va_local()
43 __flush_xen_tlb_one_local(va); in flush_xen_tlb_range_va_local()
44 va += PAGE_SIZE; in flush_xen_tlb_range_va_local()
54 static inline void flush_xen_tlb_range_va(vaddr_t va, in flush_xen_tlb_range_va() argument
57 vaddr_t end = va + size; in flush_xen_tlb_range_va()
60 while ( va < end ) in flush_xen_tlb_range_va()
62 __flush_xen_tlb_one(va); in flush_xen_tlb_range_va()
63 va += PAGE_SIZE; in flush_xen_tlb_range_va()
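
Note: both range helpers above walk the VA range one page at a time, issuing one maintenance operation per page. A runnable sketch of that loop with the TLBI instruction stubbed out by a print:

    #include <stdio.h>
    #include <stddef.h>
    #include <stdint.h>

    #define PAGE_SIZE 4096u

    static void flush_one(uintptr_t va) { printf("flush %#lx\n", (unsigned long)va); }

    static void flush_range(uintptr_t va, size_t size)
    {
        uintptr_t end = va + size;

        while ( va < end )      /* one op per page, as in the loops above */
        {
            flush_one(va);
            va += PAGE_SIZE;
        }
    }

    int main(void) { flush_range(0x200000, 3 * PAGE_SIZE); return 0; }
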
mm.h
237 #define vmap_to_mfn(va) maddr_to_mfn(virt_to_maddr((vaddr_t)va)) argument
238 #define vmap_to_page(va) mfn_to_page(vmap_to_mfn(va)) argument
243 static inline paddr_t __virt_to_maddr(vaddr_t va) in __virt_to_maddr() argument
245 uint64_t par = va_to_par(va); in __virt_to_maddr()
248 #define virt_to_maddr(va) __virt_to_maddr((vaddr_t)(va)) argument
276 uint64_t par = gva_to_ma_par(va, flags); in gvirt_to_maddr()
288 #define __virt_to_mfn(va) (virt_to_maddr(va) >> PAGE_SHIFT) argument
296 #define virt_to_mfn(va) __virt_to_mfn(va) argument
302 unsigned long va = (unsigned long)v; in virt_to_page() local
305 ASSERT(va >= XENHEAP_VIRT_START); in virt_to_page()
[all …]
page.h
265 static inline uint64_t va_to_par(vaddr_t va) in va_to_par() argument
267 uint64_t par = __va_to_par(va); in va_to_par()
271 dump_hyp_walk(va); in va_to_par()
277 static inline int gva_to_ipa(vaddr_t va, paddr_t *paddr, unsigned int flags) in gva_to_ipa() argument
279 uint64_t par = gva_to_ipa_par(va, flags); in gva_to_ipa()
282 *paddr = (par & PADDR_MASK & PAGE_MASK) | ((unsigned long) va & ~PAGE_MASK); in gva_to_ipa()
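
Note: gva_to_ipa() above recombines the PAR's physical frame bits with the page offset taken from the VA. A runnable sketch of that masking; the 48-bit PADDR_MASK is an illustrative assumption:

    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_MASK   (~0xFFFULL)          /* clear the 12 offset bits */
    #define PADDR_MASK  ((1ULL << 48) - 1)   /* assumed 48-bit physical space */

    /* Physical frame from the (fake) PAR, page offset from the VA. */
    static uint64_t combine(uint64_t par, uint64_t va)
    {
        return (par & PADDR_MASK & PAGE_MASK) | (va & ~PAGE_MASK);
    }

    int main(void)
    {
        printf("%#llx\n", (unsigned long long)combine(0x8040201000ULL, 0x123456789ABCULL));
        return 0;   /* prints 0x8040201abc: frame from PAR, offset from VA */
    }
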
/xen/xen/include/xen/
domain_page.h
32 void unmap_domain_page(const void *va);
37 mfn_t domain_page_map_to_mfn(const void *va);
45 void unmap_domain_page_global(const void *va);
58 #define unmap_domain_page(va) ((void)(va)) argument
59 #define domain_page_map_to_mfn(va) _mfn(virt_to_mfn((unsigned long)(va))) argument
71 static inline void unmap_domain_page_global(const void *va) {}; in unmap_domain_page_global() argument
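
Note: the declarations above come in matched pairs. A hedged Xen-internal sketch of the usual pairing (clear_page() and mfn_t are taken from the Xen tree; this does not build standalone):

    #include <xen/domain_page.h>
    #include <xen/mm.h>

    static void zero_frame(mfn_t mfn)
    {
        /* Temporary, CPU-local mapping of an arbitrary machine frame. */
        void *va = map_domain_page(mfn);

        clear_page(va);
        unmap_domain_page(va);   /* every map must be balanced by an unmap */
    }
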
vmap.h
24 void vfree(void *va);
28 static inline void iounmap(void __iomem *va) in iounmap() argument
30 unsigned long addr = (unsigned long)(void __force *)va; in iounmap()
/xen/xen/include/asm-arm/arm64/
page.h
49 static inline uint64_t __va_to_par(vaddr_t va) in __va_to_par() argument
53 asm volatile ("at s1e2r, %0;" : : "r" (va)); in __va_to_par()
61 static inline uint64_t gva_to_ma_par(vaddr_t va, unsigned int flags) in gva_to_ma_par() argument
66 asm volatile ("at s12e1w, %0;" : : "r" (va)); in gva_to_ma_par()
68 asm volatile ("at s12e1r, %0;" : : "r" (va)); in gva_to_ma_par()
75 static inline uint64_t gva_to_ipa_par(vaddr_t va, unsigned int flags) in gva_to_ipa_par() argument
80 asm volatile ("at s1e1w, %0;" : : "r" (va)); in gva_to_ipa_par()
82 asm volatile ("at s1e1r, %0;" : : "r" (va)); in gva_to_ipa_par()
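
Note: __va_to_par() above issues an address-translation (AT) instruction; the snippet truncates where the result is read back. A hedged sketch of the full idiom; the isb/mrs tail is an assumption, and the code executes only at EL2 on AArch64, so it is illustrative rather than a standalone program:

    #include <stdint.h>

    static inline uint64_t va_to_par_sketch(uint64_t va)
    {
        uint64_t par;

        /* Stage-1 EL2 read translation of va; the result lands in PAR_EL1. */
        asm volatile ("at s1e2r, %0" : : "r" (va));
        asm volatile ("isb; mrs %0, par_el1" : "=r" (par));
        return par;
    }
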
flushtlb.h
46 static inline void __flush_xen_tlb_one_local(vaddr_t va) in __flush_xen_tlb_one_local() argument
48 asm volatile("tlbi vae2, %0;" : : "r" (va>>PAGE_SHIFT) : "memory"); in __flush_xen_tlb_one_local()
52 static inline void __flush_xen_tlb_one(vaddr_t va) in __flush_xen_tlb_one() argument
54 asm volatile("tlbi vae2is, %0;" : : "r" (va>>PAGE_SHIFT) : "memory"); in __flush_xen_tlb_one()
/xen/xen/include/asm-arm/arm32/
page.h
65 static inline uint64_t __va_to_par(vaddr_t va) in __va_to_par() argument
69 WRITE_CP32(va, ATS1HR); in __va_to_par()
77 static inline uint64_t gva_to_ma_par(vaddr_t va, unsigned int flags) in gva_to_ma_par() argument
82 WRITE_CP32(va, ATS12NSOPW); in gva_to_ma_par()
84 WRITE_CP32(va, ATS12NSOPR); in gva_to_ma_par()
90 static inline uint64_t gva_to_ipa_par(vaddr_t va, unsigned int flags) in gva_to_ipa_par() argument
95 WRITE_CP32(va, ATS1CPW); in gva_to_ipa_par()
97 WRITE_CP32(va, ATS1CPR); in gva_to_ipa_par()
flushtlb.h
44 static inline void __flush_xen_tlb_one_local(vaddr_t va) in __flush_xen_tlb_one_local() argument
46 asm volatile(STORE_CP32(0, TLBIMVAH) : : "r" (va) : "memory"); in __flush_xen_tlb_one_local()
50 static inline void __flush_xen_tlb_one(vaddr_t va) in __flush_xen_tlb_one() argument
52 asm volatile(STORE_CP32(0, TLBIMVAHIS) : : "r" (va) : "memory"); in __flush_xen_tlb_one()
/xen/xen/arch/x86/
domain_page.c
184 if ( !va || va >= DIRECTMAP_VIRT_START ) in unmap_domain_page()
187 ASSERT(va >= MAPCACHE_VIRT_START && va < MAPCACHE_VIRT_END); in unmap_domain_page()
195 idx = PFN_DOWN(va - MAPCACHE_VIRT_START); in unmap_domain_page()
322 unsigned long va = (unsigned long)ptr; in unmap_domain_page_global() local
324 if ( va >= DIRECTMAP_VIRT_START ) in unmap_domain_page_global()
327 ASSERT(va >= VMAP_VIRT_START && va < VMAP_VIRT_END); in unmap_domain_page_global()
335 unsigned long va = (unsigned long)ptr; in domain_page_map_to_mfn() local
338 if ( va >= DIRECTMAP_VIRT_START ) in domain_page_map_to_mfn()
341 if ( va >= VMAP_VIRT_START && va < VMAP_VIRT_END ) in domain_page_map_to_mfn()
343 pl1e = virt_to_xen_l1e(va); in domain_page_map_to_mfn()
[all …]
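
Note: unmap_domain_page() above turns a mapcache VA back into a slot index by simple page arithmetic (line 195). A runnable sketch; the MAPCACHE_VIRT_START value is made up for illustration:

    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_SHIFT           12
    #define PFN_DOWN(x)          ((x) >> PAGE_SHIFT)
    #define MAPCACHE_VIRT_START  0xffff820000000000ULL   /* illustrative only */

    int main(void)
    {
        uint64_t va = MAPCACHE_VIRT_START + 5 * 4096 + 0x123;

        /* The slot index is the page number relative to the window start. */
        printf("slot %llu\n", (unsigned long long)PFN_DOWN(va - MAPCACHE_VIRT_START));
        return 0;   /* prints "slot 5": offset bits vanish in the shift */
    }
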
debug.c
117 char *va; in dbg_rw_guest_mem() local
131 va = map_domain_page(mfn); in dbg_rw_guest_mem()
132 va = va + (addr & (PAGE_SIZE-1)); in dbg_rw_guest_mem()
136 copy_from_user(va, buf, pagecnt); /* va = buf */ in dbg_rw_guest_mem()
141 copy_to_user(buf, va, pagecnt); /* buf = va */ in dbg_rw_guest_mem()
144 unmap_domain_page(va); in dbg_rw_guest_mem()
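
Note: dbg_rw_guest_mem() above copies through one guest frame at a time, never crossing a page boundary in a single step. A runnable sketch of that chunking, with plain memcpy() standing in for map_domain_page() and copy_to/from_user():

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>

    #define PAGE_SIZE 4096u

    static void copy_chunked(uint8_t *dst, const uint8_t *src, uintptr_t addr, size_t len)
    {
        while ( len > 0 )
        {
            /* Bytes left before the next page boundary, capped by len. */
            size_t pagecnt = PAGE_SIZE - (addr & (PAGE_SIZE - 1));

            if ( pagecnt > len )
                pagecnt = len;

            memcpy(dst, src, pagecnt);
            dst += pagecnt; src += pagecnt; addr += pagecnt; len -= pagecnt;
        }
    }

    int main(void)
    {
        uint8_t src[2 * PAGE_SIZE], dst[2 * PAGE_SIZE];

        memset(src, 0xAB, sizeof(src));
        copy_chunked(dst, src, 0x1ff0, sizeof(src));   /* starts 16 bytes before a boundary */
        printf("%#x\n", dst[0]);                       /* prints 0xab */
        return 0;
    }
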
/xen/xen/include/asm-x86/x86_64/
page.h
55 #define virt_to_pdx(va) (((unsigned long)(va) - DIRECTMAP_VIRT_START) >> \ argument
60 static inline unsigned long __virt_to_maddr(unsigned long va) in __virt_to_maddr() argument
62 ASSERT(va < DIRECTMAP_VIRT_END); in __virt_to_maddr()
63 if ( va >= DIRECTMAP_VIRT_START ) in __virt_to_maddr()
64 va -= DIRECTMAP_VIRT_START; in __virt_to_maddr()
69 ASSERT(((long)va >> (PAGE_ORDER_1G + PAGE_SHIFT)) == in __virt_to_maddr()
72 va += xen_phys_start - XEN_VIRT_START; in __virt_to_maddr()
74 return (va & ma_va_bottom_mask) | in __virt_to_maddr()
75 ((va << pfn_pdx_hole_shift) & ma_top_mask); in __virt_to_maddr()
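
Note: the tail of __virt_to_maddr() above undoes PDX compression: a run of unused address bits (the "hole") was squeezed out, so converting back shifts the top bits left across it. A runnable sketch with an assumed hole position and width:

    #include <stdio.h>
    #include <stdint.h>

    #define HOLE_SHIFT  34                       /* lowest bit of the hole (assumed) */
    #define HOLE_BITS   6                        /* width of the hole (assumed) */
    #define BOTTOM_MASK ((1ULL << HOLE_SHIFT) - 1)
    #define TOP_MASK    (~0ULL << (HOLE_SHIFT + HOLE_BITS))

    /* Keep the bits below the hole, shift the rest left across it. */
    static uint64_t decompress(uint64_t addr)
    {
        return (addr & BOTTOM_MASK) | ((addr << HOLE_BITS) & TOP_MASK);
    }

    int main(void)
    {
        printf("%#llx\n", (unsigned long long)decompress(0x412345678ULL));
        return 0;
    }
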
/xen/xen/arch/x86/guest/hyperv/
tlb.c
38 static unsigned int fill_gva_list(uint64_t *gva_list, const void *va, in fill_gva_list() argument
41 unsigned long cur = (unsigned long)va; in fill_gva_list()
72 static uint64_t flush_tlb_ex(const cpumask_t *mask, const void *va, in flush_tlb_ex() argument
107 if ( !va || (PAGE_SIZE << order) / HV_TLB_FLUSH_UNIT > max_gvas ) in flush_tlb_ex()
119 fill_gva_list(gva_list, va, order), in flush_tlb_ex()
126 int hyperv_flush_tlb(const cpumask_t *mask, const void *va, in hyperv_flush_tlb() argument
186 if ( !va || (PAGE_SIZE << order) / HV_TLB_FLUSH_UNIT > MAX_GVAS ) in hyperv_flush_tlb()
191 fill_gva_list(flush->gva_list, va, order), in hyperv_flush_tlb()
196 ret = flush_tlb_ex(mask, va, flags); in hyperv_flush_tlb()
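
Note: fill_gva_list() above packs the flush range into fixed-size list entries. A runnable sketch of one plausible encoding (base page plus an extra-page count in the low 12 bits, per the Hyper-V TLFS); the exact layout is an assumption, since the snippet is truncated:

    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1ULL << PAGE_SHIFT)

    static unsigned int fill(uint64_t *list, uint64_t va, uint64_t end)
    {
        unsigned int n = 0;

        while ( va < end )
        {
            uint64_t pages = (end - va) >> PAGE_SHIFT;

            if ( pages > 4096 )          /* one entry covers at most 4096 pages */
                pages = 4096;

            /* Base page in the upper bits, extra-page count in the low 12. */
            list[n++] = (va & ~(PAGE_SIZE - 1)) | (pages - 1);
            va += pages << PAGE_SHIFT;
        }
        return n;
    }

    int main(void)
    {
        uint64_t list[4];
        unsigned int n = fill(list, 0x10000, 0x10000 + 5 * PAGE_SIZE);

        printf("%u entries, first %#llx\n", n, (unsigned long long)list[0]);
        return 0;
    }
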
/xen/xen/arch/x86/mm/
guest_walk.c
124 gw->va = va; in guest_walk_tables()
142 guest_l4_table_offset(va) * sizeof(gw->l4e); in guest_walk_tables()
145 gw->l4e = l4p[guest_l4_table_offset(va)]; in guest_walk_tables()
177 guest_l3_table_offset(va) * sizeof(gw->l3e); in guest_walk_tables()
180 gw->l3e = l3p[guest_l3_table_offset(va)]; in guest_walk_tables()
235 guest_l3_table_offset(va) * sizeof(gw->l3e); in guest_walk_tables()
277 l2gpa += guest_l2_table_offset(va) * sizeof(gw->l2e); in guest_walk_tables()
280 gw->l2e = l2p[guest_l2_table_offset(va)]; in guest_walk_tables()
341 guest_l1_table_offset(va)); in guest_walk_tables()
366 guest_l1_table_offset(va) * sizeof(gw->l1e); in guest_walk_tables()
[all …]
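
Note: guest_walk_tables() above repeats the same indexing at every level: nine VA bits select an entry, scaled by the entry size. A runnable sketch; the shift values assume 4-level long-mode paging:

    #include <stdio.h>
    #include <stdint.h>

    #define ENTRY_SIZE 8u

    static uint64_t entry_gpa(uint64_t table_gpa, uint64_t va, unsigned int shift)
    {
        uint64_t offset = (va >> shift) & 0x1FF;   /* guest_lN_table_offset(va) */

        return table_gpa + offset * ENTRY_SIZE;
    }

    int main(void)
    {
        uint64_t va = 0x00007f1234567000ULL;

        printf("l4 entry at %#llx\n", (unsigned long long)entry_gpa(0x1000, va, 39));
        printf("l3 entry at %#llx\n", (unsigned long long)entry_gpa(0x2000, va, 30));
        return 0;
    }
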
/xen/misc/coverity/
model.c
73 void xfree(void *va) in xfree() argument
75 __coverity_free__(va); in xfree()
105 void unmap_domain_page(const void *va) in unmap_domain_page() argument
107 unsigned long ptr = (unsigned long)va & PAGE_MASK; in unmap_domain_page()
/xen/xen/arch/arm/
livepatch.c
17 #define virt_to_mfn(va) _mfn(__virt_to_mfn(va)) argument
144 int arch_livepatch_secure(const void *va, unsigned int pages, enum va_type type) in arch_livepatch_secure() argument
146 unsigned long start = (unsigned long)va; in arch_livepatch_secure()
149 ASSERT(va); in arch_livepatch_secure()
mm.c
48 #define virt_to_mfn(va) _mfn(__virt_to_mfn(va)) argument
422 vunmap(va); in unmap_domain_page_global()
431 vaddr_t va; in map_domain_page() local
481 va = (DOMHEAP_VIRT_START in map_domain_page()
491 return (void *)va; in map_domain_page()
501 if ( !va ) in unmap_domain_page()
521 if ( va >= VMAP_VIRT_START && va < VMAP_VIRT_END ) in domain_page_map_to_mfn()
522 return virt_to_mfn(va); in domain_page_map_to_mfn()
679 if ( !is_kernel(va) ) in setup_pagetables()
681 pte = pte_of_xenaddr(va); in setup_pagetables()
[all …]
/xen/xen/arch/x86/x86_64/
mm.c
131 unsigned long va; in m2p_mapped() local
136 l3e = l3e_from_l4e(idle_pg_table[l4_table_offset(va)], l3_table_offset(va)); in m2p_mapped()
254 unsigned long i, va, rwva; in destroy_m2p_mapping() local
281 l2_table_offset(va); in destroy_m2p_mapping()
380 unsigned long i, va, smap, emap; in setup_m2p_table() local
444 l2_table_offset(va); in setup_m2p_table()
479 unsigned long i, mpt_size, va; in paging_init() local
489 for ( va = DIRECTMAP_VIRT_START; in paging_init()
490 va < DIRECTMAP_VIRT_END && (void *)va < __va(mem_hotplug); in paging_init()
491 va += (1UL << L4_PAGETABLE_SHIFT) ) in paging_init()
[all …]
/xen/xen/include/asm-x86/guest/
hypervisor.h
38 int (*flush_tlb)(const cpumask_t *mask, const void *va, unsigned int flags);
54 int hypervisor_flush_tlb(const cpumask_t *mask, const void *va,
67 static inline int hypervisor_flush_tlb(const cpumask_t *mask, const void *va, in hypervisor_flush_tlb() argument
/xen/xen/include/asm-x86/
trace.h
23 void __trace_trap_one_addr(unsigned event, unsigned long va);
24 static inline void trace_trap_one_addr(unsigned event, unsigned long va) in trace_trap_one_addr() argument
27 __trace_trap_one_addr(event, va); in trace_trap_one_addr()
flushtlb.h
141 unsigned int flush_area_local(const void *va, unsigned int flags);
145 void flush_area_mask(const cpumask_t *, const void *va, unsigned int flags);
149 #define flush_area_all(va, flags) flush_area_mask(&cpu_online_map, va, flags) argument
hypercall.h
98 unsigned long va,
108 unsigned long va,
165 unsigned int va, u32 lo, u32 hi, unsigned int flags);
168 unsigned int va, u32 lo, u32 hi, unsigned int flags, domid_t domid);
/xen/tools/libxl/
flexarray.c
88 va_list va; in flexarray_vappend() local
92 va_start(va, array); in flexarray_vappend()
93 for(ret = 0; (ptr = va_arg(va, void *)); ret++) { in flexarray_vappend()
97 va_end(va); in flexarray_vappend()
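
Note: flexarray_vappend() above walks a NULL-terminated variadic argument list. A runnable sketch of that stdarg idiom, with the flexarray replaced by a counter:

    #include <stdarg.h>
    #include <stdio.h>

    /* Count pointer arguments up to (not including) the NULL terminator. */
    static int count_args(void *first, ...)
    {
        va_list va;
        void *ptr;
        int n = 0;

        va_start(va, first);
        for ( ptr = first; ptr; ptr = va_arg(va, void *) )
            n++;
        va_end(va);
        return n;
    }

    int main(void)
    {
        int a, b;

        printf("%d\n", count_args(&a, &b, NULL));   /* prints 2 */
        return 0;
    }
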

