Lines Matching refs:pfec
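All of these hits are in Xen's HVM instruction emulator, xen/arch/x86/hvm/emulate.c. pfec is the x86 page-fault error-code mask: each emulation entry point composes it from PFEC_page_present plus, as appropriate, PFEC_write_access, PFEC_user_mode, PFEC_insn_fetch or the Xen-internal PFEC_implicit, and threads it through the linear-to-physical translation and copy helpers. The mask is also used as an output channel: a failed walk can come back with PFEC_page_paged or PFEC_page_shared set (see the hvmemul_linear_to_phys() hits below), and hvmemul_rep_movs() translates its source with the base mask while ORing PFEC_write_access into the mask used for the destination.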
575 unsigned long linear, unsigned int bytes, uint32_t pfec, in hvmemul_map_linear_addr() argument
617 res = hvm_translate_get_page(curr, addr, true, pfec, in hvmemul_map_linear_addr()
653 if ( pfec & PFEC_write_access ) in hvmemul_map_linear_addr()
672 hvm_monitor_check_p2m(addr, gfn, pfec, npfec_kind_with_gla) ) in hvmemul_map_linear_addr()
751 uint32_t pfec, in hvmemul_linear_to_phys() argument
780 addr, &_paddr, bytes_per_rep, &one_rep, pfec, hvmemul_ctxt); in hvmemul_linear_to_phys()
785 else if ( (pfn = paging_gva_to_gfn(curr, addr, &pfec)) == gfn_x(INVALID_GFN) ) in hvmemul_linear_to_phys()
787 if ( pfec & (PFEC_page_paged | PFEC_page_shared) ) in hvmemul_linear_to_phys()
790 x86_emul_pagefault(pfec, addr, &hvmemul_ctxt->ctxt); in hvmemul_linear_to_phys()
800 npfn = paging_gva_to_gfn(curr, addr, &pfec); in hvmemul_linear_to_phys()
806 if ( pfec & (PFEC_page_paged | PFEC_page_shared) ) in hvmemul_linear_to_phys()
815 x86_emul_pagefault(pfec, addr & PAGE_MASK, &hvmemul_ctxt->ctxt); in hvmemul_linear_to_phys()
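The hvmemul_linear_to_phys() hits above show pfec flowing both ways: it goes into paging_gva_to_gfn() describing the access, and comes back with extra bits explaining a failed walk, which the caller either retries on (paged/shared pages) or reflects to the guest as a page fault. Below is a minimal standalone sketch of that in/out calling convention, assuming hypothetical demo_* helpers and assumed bit positions for the Xen-internal flags; it illustrates the pattern only, not the Xen implementation.

/*
 * Sketch of an in/out error-code mask, modelled on hvmemul_linear_to_phys().
 * All demo_* names and the positions of the internal bits are assumptions.
 */
#include <stdint.h>
#include <stdio.h>

#define PFEC_page_present  (1u << 0)
#define PFEC_write_access  (1u << 1)
#define PFEC_page_paged    (1u << 16)   /* assumed: page is paged out       */
#define PFEC_page_shared   (1u << 17)   /* assumed: page is a shared copy   */

#define DEMO_INVALID_GFN   (~0ul)

enum demo_rc { DEMO_OKAY, DEMO_RETRY, DEMO_EXCEPTION };

/* Hypothetical translation helper: on failure it annotates *pfec with why. */
static unsigned long demo_gva_to_gfn(unsigned long gva, uint32_t *pfec)
{
    if ( gva & (1ul << 20) )            /* pretend this range is paged out */
    {
        *pfec |= PFEC_page_paged;
        return DEMO_INVALID_GFN;
    }
    return gva >> 12;                   /* identity "translation" otherwise */
}

static enum demo_rc demo_linear_to_phys(unsigned long addr,
                                        unsigned long *paddr, uint32_t pfec)
{
    unsigned long gfn = demo_gva_to_gfn(addr, &pfec);

    if ( gfn == DEMO_INVALID_GFN )
    {
        /* Paged/shared: let the paging helpers make progress, then retry. */
        if ( pfec & (PFEC_page_paged | PFEC_page_shared) )
            return DEMO_RETRY;
        /* Otherwise the accumulated bits describe the fault to inject. */
        printf("inject #PF ec=%#x cr2=%#lx\n", pfec, addr);
        return DEMO_EXCEPTION;
    }

    *paddr = (gfn << 12) | (addr & 0xfff);
    return DEMO_OKAY;
}

int main(void)
{
    unsigned long paddr = 0;
    uint32_t pfec = PFEC_page_present | PFEC_write_access;
    enum demo_rc rc = demo_linear_to_phys(0x2345, &paddr, pfec);

    printf("rc=%d paddr=%#lx\n", rc, paddr);
    printf("rc=%d\n", demo_linear_to_phys(1ul << 20, &paddr, pfec));
    return 0;
}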
1055 uint32_t pfec, struct hvm_emulate_ctxt *hvmemul_ctxt, bool_t known_gpfn) in hvmemul_linear_mmio_access() argument
1074 rc = hvmemul_linear_to_phys(gla, &gpa, chunk, &one_rep, pfec, in hvmemul_linear_mmio_access()
1096 rc = hvmemul_linear_to_phys(gla, &gpa, chunk, &one_rep, pfec, in hvmemul_linear_mmio_access()
1107 uint32_t pfec, struct hvm_emulate_ctxt *hvmemul_ctxt, in hvmemul_linear_mmio_read() argument
1111 pfec, hvmemul_ctxt, translate); in hvmemul_linear_mmio_read()
1116 uint32_t pfec, struct hvm_emulate_ctxt *hvmemul_ctxt, in hvmemul_linear_mmio_write() argument
1120 pfec, hvmemul_ctxt, translate); in hvmemul_linear_mmio_write()
1123 static bool known_gla(unsigned long addr, unsigned int bytes, uint32_t pfec) in known_gla() argument
1127 if ( pfec & PFEC_write_access ) in known_gla()
1132 else if ( pfec & PFEC_insn_fetch ) in known_gla()
1145 uint32_t pfec, struct hvm_emulate_ctxt *hvmemul_ctxt) in linear_read() argument
1157 rc = linear_read(addr, part1, p_data, pfec, hvmemul_ctxt); in linear_read()
1160 pfec, hvmemul_ctxt); in linear_read()
1171 rc = hvm_copy_from_guest_linear(p_data, addr, bytes, pfec, &pfinfo); in linear_read()
1183 if ( pfec & PFEC_insn_fetch ) in linear_read()
1186 return hvmemul_linear_mmio_read(addr, bytes, p_data, pfec, in linear_read()
1188 known_gla(addr, bytes, pfec)); in linear_read()
1200 uint32_t pfec, struct hvm_emulate_ctxt *hvmemul_ctxt) in linear_write() argument
1212 rc = linear_write(addr, part1, p_data, pfec, hvmemul_ctxt); in linear_write()
1215 pfec, hvmemul_ctxt); in linear_write()
1226 rc = hvm_copy_to_guest_linear(addr, p_data, bytes, pfec, &pfinfo); in linear_write()
1238 return hvmemul_linear_mmio_write(addr, bytes, p_data, pfec, in linear_write()
1240 known_gla(addr, bytes, pfec)); in linear_write()
1260 uint32_t pfec = PFEC_page_present; in __hvmemul_read() local
1264 pfec |= PFEC_implicit; in __hvmemul_read()
1266 pfec |= PFEC_user_mode; in __hvmemul_read()
1268 pfec |= PFEC_insn_fetch; in __hvmemul_read()
1275 return linear_read(addr, bytes, p_data, pfec, hvmemul_ctxt); in __hvmemul_read()
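The __hvmemul_read() hits here, and the hvmemul_write()/hvmemul_rmw()/hvmemul_blk()/hvmemul_cmpxchg() groups that follow, all compose the mask the same way: start from PFEC_page_present, add PFEC_write_access for stores, then record whether the access is an implicit supervisor access, a CPL 3 access, or an instruction fetch. A minimal standalone sketch of that composition follows; build_pfec() is a hypothetical stand-in for the open-coded logic in each handler, and the PFEC_implicit position is an assumed Xen-internal bit.

#include <stdint.h>
#include <stdio.h>

/* Architectural x86 page-fault error-code bits. */
#define PFEC_page_present  (1u << 0)    /* bit 0 (P)                        */
#define PFEC_write_access  (1u << 1)    /* bit 1 (W/R): write access        */
#define PFEC_user_mode     (1u << 2)    /* bit 2 (U/S): user-mode access    */
#define PFEC_insn_fetch    (1u << 4)    /* bit 4 (I/D): instruction fetch   */
/* Assumed Xen-internal bit: implicit (e.g. descriptor-table) access. */
#define PFEC_implicit      (1u << 18)

enum access_kind { ACCESS_READ, ACCESS_WRITE, ACCESS_INSN_FETCH };

/* Hypothetical helper mirroring the open-coded composition above. */
static uint32_t build_pfec(enum access_kind kind, unsigned int cpl, int implicit)
{
    uint32_t pfec = PFEC_page_present;

    if ( kind == ACCESS_WRITE )
        pfec |= PFEC_write_access;

    if ( implicit )
        pfec |= PFEC_implicit;          /* implicit accesses never walk as user mode */
    else if ( cpl == 3 )
        pfec |= PFEC_user_mode;

    if ( kind == ACCESS_INSN_FETCH )
        pfec |= PFEC_insn_fetch;

    return pfec;
}

int main(void)
{
    printf("data read,  CPL 3: %#x\n", build_pfec(ACCESS_READ, 3, 0));
    printf("data write, CPL 3: %#x\n", build_pfec(ACCESS_WRITE, 3, 0));
    printf("insn fetch, CPL 3: %#x\n", build_pfec(ACCESS_INSN_FETCH, 3, 0));
    printf("implicit descriptor read: %#x\n", build_pfec(ACCESS_READ, 3, 1));
    return 0;
}

With these values a CPL 3 data write yields 0x7 and an instruction fetch 0x15, which is the shape of mask the linear_read()/linear_write() helpers receive in the hits above and below.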
1353 uint32_t pfec = PFEC_page_present | PFEC_write_access; in hvmemul_write() local
1358 pfec |= PFEC_implicit; in hvmemul_write()
1360 pfec |= PFEC_user_mode; in hvmemul_write()
1367 if ( !known_gla(addr, bytes, pfec) ) in hvmemul_write()
1369 mapping = hvmemul_map_linear_addr(addr, bytes, pfec, hvmemul_ctxt); in hvmemul_write()
1375 return linear_write(addr, bytes, p_data, pfec, hvmemul_ctxt); in hvmemul_write()
1402 uint32_t pfec = PFEC_page_present | PFEC_write_access; in hvmemul_rmw() local
1412 pfec |= PFEC_implicit; in hvmemul_rmw()
1414 pfec |= PFEC_user_mode; in hvmemul_rmw()
1416 if ( !known_gla(addr, bytes, pfec) ) in hvmemul_rmw()
1418 mapping = hvmemul_map_linear_addr(addr, bytes, pfec, hvmemul_ctxt); in hvmemul_rmw()
1434 rc = linear_read(addr, bytes, &data, pfec, hvmemul_ctxt); in hvmemul_rmw()
1438 rc = linear_write(addr, bytes, &data, pfec, hvmemul_ctxt); in hvmemul_rmw()
1456 uint32_t pfec = PFEC_page_present; in hvmemul_blk() local
1466 pfec |= PFEC_write_access; in hvmemul_blk()
1469 pfec |= PFEC_implicit; in hvmemul_blk()
1471 pfec |= PFEC_user_mode; in hvmemul_blk()
1473 mapping = hvmemul_map_linear_addr(addr, bytes, pfec, hvmemul_ctxt); in hvmemul_blk()
1601 uint32_t pfec = PFEC_page_present | PFEC_write_access; in hvmemul_cmpxchg() local
1612 pfec |= PFEC_implicit; in hvmemul_cmpxchg()
1614 pfec |= PFEC_user_mode; in hvmemul_cmpxchg()
1616 if ( !known_gla(addr, bytes, pfec) ) in hvmemul_cmpxchg()
1618 mapping = hvmemul_map_linear_addr(addr, bytes, pfec, hvmemul_ctxt); in hvmemul_cmpxchg()
1626 return hvmemul_linear_mmio_write(addr, bytes, p_new, pfec, in hvmemul_cmpxchg()
1706 uint32_t pfec = PFEC_page_present | PFEC_write_access; in hvmemul_rep_ins() local
1718 pfec |= PFEC_user_mode; in hvmemul_rep_ins()
1721 addr, &gpa, bytes_per_rep, reps, pfec, hvmemul_ctxt); in hvmemul_rep_ins()
1784 uint32_t pfec = PFEC_page_present; in hvmemul_rep_outs() local
1799 pfec |= PFEC_user_mode; in hvmemul_rep_outs()
1802 addr, &gpa, bytes_per_rep, reps, pfec, hvmemul_ctxt); in hvmemul_rep_outs()
1829 uint32_t pfec = PFEC_page_present; in hvmemul_rep_movs() local
1847 pfec |= PFEC_user_mode; in hvmemul_rep_movs()
1863 rc = hvmemul_linear_to_phys(saddr, &sgpa, bytes_per_rep, reps, pfec, in hvmemul_rep_movs()
1880 pfec | PFEC_write_access, hvmemul_ctxt); in hvmemul_rep_movs()
2016 uint32_t pfec = PFEC_page_present | PFEC_write_access; in hvmemul_rep_stos() local
2019 pfec |= PFEC_user_mode; in hvmemul_rep_stos()
2021 rc = hvmemul_linear_to_phys(addr, &gpa, bytes_per_rep, reps, pfec, in hvmemul_rep_stos()
2296 uint32_t pfec = PFEC_page_present; in hvmemul_cache_op() local
2318 pfec |= PFEC_user_mode; in hvmemul_cache_op()
2320 mapping = hvmemul_map_linear_addr(addr, 0, pfec, hvmemul_ctxt); in hvmemul_cache_op()
2885 unsigned int pfec = PFEC_page_present | PFEC_insn_fetch; in hvm_emulate_init_per_insn() local
2889 pfec |= PFEC_user_mode; in hvm_emulate_init_per_insn()
2901 pfec, NULL) == HVMTRANS_okay) ? in hvm_emulate_init_per_insn()