/xen/tools/fuzz/x86_instruction_emulator/
fuzz-emul.c
  171  struct x86_emulate_ctxt *ctxt)  in fuzz_read() argument
  194  struct x86_emulate_ctxt *ctxt)  in fuzz_read_io() argument
  492  if ( !check_state(ctxt) )  in fuzz_write_cr()
  662  printf("addr / sp size: %d / %d\n", ctxt->addr_size, ctxt->sp_size);  in dump_state()
  696  ctxt->lma = long_mode_active(ctxt);  in set_sizes()
  698  if ( in_longmode(ctxt) )  in set_sizes()
  699  ctxt->addr_size = ctxt->sp_size = 64;  in set_sizes()
  925  sanitize_input(&ctxt);  in LLVMFuzzerTestOneInput()
  927  disable_hooks(&ctxt);  in LLVMFuzzerTestOneInput()
  933  set_sizes(&ctxt);  in LLVMFuzzerTestOneInput()
  [all …]

/xen/tools/libacpi/
build.c
  93   madt = ctxt->mem_ops.alloc(ctxt, sz, 16);  in construct_madt()
  169  ctxt->mem_ops.v2p(ctxt, &madt->header.checksum);  in construct_madt()
  179  hpet = ctxt->mem_ops.alloc(ctxt, sizeof(*hpet), 16);  in construct_hpet()
  203  waet = ctxt->mem_ops.alloc(ctxt, sizeof(*waet), 16);  in construct_waet()
  227  p = ctxt->mem_ops.alloc(ctxt, size, 16);  in construct_srat()
  282  slit = ctxt->mem_ops.alloc(ctxt, size, 16);  in construct_slit()
  497  info->vm_gid_addr = ctxt->mem_ops.v2p(ctxt, buf);  in new_vm_gid()
  595  fadt = ctxt->mem_ops.alloc(ctxt, fadt_size, 16);  in acpi_build_tables()
  611  fadt->dsdt = ctxt->mem_ops.v2p(ctxt, dsdt);  in acpi_build_tables()
  612  fadt->x_dsdt = ctxt->mem_ops.v2p(ctxt, dsdt);  in acpi_build_tables()
  [all …]
libacpi.h
  51   void *(*alloc)(struct acpi_ctxt *ctxt, uint32_t size, uint32_t align);
  52   void (*free)(struct acpi_ctxt *ctxt, void *v, uint32_t size);
  53   unsigned long (*v2p)(struct acpi_ctxt *ctxt, void *v);
  101  int acpi_build_tables(struct acpi_ctxt *ctxt, struct acpi_config *config);
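
The three function pointers at lines 51-53 are libacpi's entire memory interface: the caller supplies allocation, (optional) freeing, and virtual-to-physical translation, and every construct_*() helper in build.c above goes through them. The sketch below shows what a minimal client-side context could look like; only the mem_ops signatures mirror the header lines listed here, while the bump-allocator pool, the pool_* helper names and the extra fields are illustrative, not the real hvmloader/libxl glue.

    #include <stdint.h>
    #include <string.h>

    /* Simplified stand-in for libacpi's context; only mem_ops mirrors libacpi.h. */
    struct acpi_ctxt {
        struct acpi_mem_ops {
            void *(*alloc)(struct acpi_ctxt *ctxt, uint32_t size, uint32_t align);
            void (*free)(struct acpi_ctxt *ctxt, void *v, uint32_t size);
            unsigned long (*v2p)(struct acpi_ctxt *ctxt, void *v);
        } mem_ops;
        unsigned char *pool, *cur;      /* hypothetical backing store */
        unsigned long pool_phys;        /* guest-physical base of the pool */
    };

    /* Bump allocator: hand out zeroed, aligned chunks from the pool. */
    static void *pool_alloc(struct acpi_ctxt *ctxt, uint32_t size, uint32_t align)
    {
        unsigned long off = ctxt->cur - ctxt->pool;

        off = (off + align - 1) & ~(unsigned long)(align - 1);  /* align is 2^n */
        ctxt->cur = ctxt->pool + off + size;
        return memset(ctxt->pool + off, 0, size);
    }

    /* Tables live for the guest's lifetime, so freeing is a no-op here. */
    static void pool_free(struct acpi_ctxt *ctxt, void *v, uint32_t size)
    {
    }

    /* Translate a pool pointer into the address the guest will see. */
    static unsigned long pool_v2p(struct acpi_ctxt *ctxt, void *v)
    {
        return ctxt->pool_phys + ((unsigned char *)v - ctxt->pool);
    }

With such a context filled in (mem_ops = { pool_alloc, pool_free, pool_v2p }), acpi_build_tables(&ctxt, &config) can place each table through mem_ops.alloc() and patch guest-physical pointers such as fadt->dsdt through v2p(), which is exactly the pattern the build.c matches above show.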

/xen/xen/arch/x86/x86_emulate/
x86_emulate.h
  695  struct x86_emulate_ctxt *ctxt,
  704  struct x86_emulate_ctxt *ctxt,
  747  struct x86_emulate_ctxt *ctxt);
  751  struct x86_emulate_ctxt *ctxt,
  825  ASSERT(!ctxt->event_pending);  in x86_emul_hw_exception()
  827  ctxt->event.vector = vector;  in x86_emul_hw_exception()
  831  ctxt->event_pending = true;  in x86_emul_hw_exception()
  837  ASSERT(!ctxt->event_pending);  in x86_emul_pagefault()
  842  ctxt->event.cr2 = cr2;  in x86_emul_pagefault()
  844  ctxt->event_pending = true;  in x86_emul_pagefault()
  [all …]
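
The matches at lines 825-844 show the convention the emulator's hook interface relies on: a hook never injects a fault itself, it records one in ctxt->event via x86_emul_hw_exception()/x86_emul_pagefault(), which sets event_pending, and returns X86EMUL_EXCEPTION so the caller can decide what to do with it. Below is a minimal sketch of a hook written against that convention; the hook body and the choice of MSR are made up purely for illustration.

    /* Illustrative read_msr hook: anything other than EFER raises #GP(0).
     * The point is the error path: record the event in ctxt and return
     * X86EMUL_EXCEPTION, leaving ctxt->event_pending set for the caller. */
    static int sketch_read_msr(unsigned int reg, uint64_t *val,
                               struct x86_emulate_ctxt *ctxt)
    {
        if ( reg != MSR_EFER )              /* arbitrary example policy */
        {
            x86_emul_hw_exception(TRAP_gp_fault, 0, ctxt);
            return X86EMUL_EXCEPTION;
        }

        *val = 0;                           /* pretend EFER reads as zero */
        return X86EMUL_OKAY;
    }

Once x86_emulate() returns X86EMUL_EXCEPTION, the caller typically forwards ctxt.event to the guest, as the pv_inject_event() matches further down illustrate.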
x86_emulate.c
  1920   return !(in_realmode(ctxt, ops) || (ctxt->regs->eflags & X86_EFLAGS_VM));  in in_protmode()
  2037   ctxt->retire.raw = 0;  in init_context()
  3529   ctxt->opcode = opcode;  in x86_decode()
  3713   init_context(ctxt);  in x86_emulate()
  3742   b = ctxt->opcode;  in x86_emulate()
  4845   ctxt->event.insn_len = _regs.r(ip) - ctxt->regs->r(ip);  in x86_emulate()
  6095   if ( ctxt->lma )  in x86_emulate()
  6887   generate_exception_if(amd_like(ctxt) && ctxt->lma, EXC_UD);  in x86_emulate()
  6932   generate_exception_if(amd_like(ctxt) && ctxt->lma, EXC_UD);  in x86_emulate()
  12110  typeof(ctxt->retire) retire = ctxt->retire;  in x86_emulate_wrapper()
  [all …]

/xen/xen/arch/x86/hvm/svm/
emulate.c
  68   struct hvm_emulate_ctxt ctxt;  in svm_get_insn_len() local
  86   hvm_emulate_init_per_insn(&ctxt, NULL, 0);  in svm_get_insn_len()
  87   state = x86_decode_insn(&ctxt.ctxt, hvmemul_insn_fetch);  in svm_get_insn_len()
  91   emul_len = x86_insn_length(state, &ctxt.ctxt);  in svm_get_insn_len()
  99   if ( instr_opcode == ctxt.ctxt.opcode )  in svm_get_insn_len()
  114  &ctxt, X86EMUL_UNHANDLEABLE);  in svm_get_insn_len()
  126  struct hvm_emulate_ctxt ctxt;  in svm_get_task_switch_insn_len() local
  131  hvm_emulate_init_per_insn(&ctxt, NULL, 0);  in svm_get_task_switch_insn_len()
  132  state = x86_decode_insn(&ctxt.ctxt, hvmemul_insn_fetch);  in svm_get_task_switch_insn_len()
  136  emul_len = x86_insn_length(state, &ctxt.ctxt);  in svm_get_task_switch_insn_len()
  [all …]
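
Both functions above use the same recipe to determine an instruction's length without emulating it: set up a per-instruction HVM emulation context, let the decoder fetch from the guest, ask x86_insn_length(), and cross-check the decoded opcode. A condensed sketch of that flow follows, assuming Xen's hvm_emulate_*()/x86_decode_insn() interfaces as listed above; the helper name is hypothetical, the exact setup varies by caller, and error reporting is trimmed.

    /* Condensed from the svm_get_insn_len() matches above; not the exact code. */
    static unsigned int sketch_insn_len(unsigned int expect_opcode)
    {
        struct hvm_emulate_ctxt ctxt;
        struct x86_emulate_state *state;
        unsigned int len;

        hvm_emulate_init_once(&ctxt, NULL, guest_cpu_user_regs());
        hvm_emulate_init_per_insn(&ctxt, NULL, 0);

        state = x86_decode_insn(&ctxt.ctxt, hvmemul_insn_fetch);
        if ( IS_ERR_OR_NULL(state) )
            return 0;

        len = x86_insn_length(state, &ctxt.ctxt);

        /* Only trust the length if we decoded the instruction we expected. */
        if ( expect_opcode != ctxt.ctxt.opcode )
            len = 0;

        x86_emulate_free_state(state);

        return len;
    }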

/xen/xen/arch/x86/pv/
emul-priv-op.c
  111   p = ctxt->io_emul_stub;  in io_emul_stub_setup()
  119   quirk_bytes = ioemul_handle_quirk(opcode, p, ctxt->ctxt.regs);  in io_emul_stub_setup()
  350   struct priv_op_ctxt *poc = container_of(ctxt, struct priv_op_ctxt, ctxt);  in read_io()
  436   struct priv_op_ctxt *poc = container_of(ctxt, struct priv_op_ctxt, ctxt);  in write_io()
  583   struct priv_op_ctxt *poc = container_of(ctxt, struct priv_op_ctxt, ctxt);  in rep_ins()
  650   struct priv_op_ctxt *poc = container_of(ctxt, struct priv_op_ctxt, ctxt);  in rep_outs()
  1192  container_of(ctxt, struct priv_op_ctxt, ctxt);  in insn_fetch()
  1271  ctxt.ctxt.addr_size = ar & _SEGMENT_L ? 64 : ar & _SEGMENT_DB ? 32 : 16;  in pv_emulate_privileged_op()
  1273  rc = x86_emulate(&ctxt.ctxt, &priv_op_ops);  in pv_emulate_privileged_op()
  1291  if ( ctxt.ctxt.retire.singlestep )  in pv_emulate_privileged_op()
  [all …]
ro-page-fault.c
  71   struct ptwr_emulate_ctxt *ptwr_ctxt = ctxt->data;  in ptwr_emulated_update()
  97   ctxt);  in ptwr_emulated_update()
  284  ctxt->data = &ptwr_ctxt;  in ptwr_do_page_fault()
  285  rc = x86_emulate(ctxt, &ptwr_emulate_ops);  in ptwr_do_page_fault()
  329  ctxt->data = &mmio_ro_ctxt;  in mmio_ro_do_page_fault()
  341  struct x86_emulate_ctxt ctxt = {  in pv_ro_page_fault() local
  363  rc = ptwr_do_page_fault(&ctxt, addr, pte);  in pv_ro_page_fault()
  377  ctxt.event.vector == TRAP_page_fault )  in pv_ro_page_fault()
  378  pv_inject_event(&ctxt.event);  in pv_ro_page_fault()
  382  ctxt.event.type, ctxt.event.vector);  in pv_ro_page_fault()
  [all …]
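
The pv_ro_page_fault() matches show the usual shape of a caller-side x86_emulate() invocation: build an x86_emulate_ctxt around the faulting registers, hang caller-private state off ctxt.data for the ops callbacks to pick up (ptwr_emulated_update() does so at line 71), run the emulator with a dedicated ops table, and forward any resulting page fault back to the guest. A simplified sketch of that shape, reusing the ops-table name from the matches above; the cpuid/vendor and address-size initialisation done by the real function is omitted.

    /* Simplified from the pv_ro_page_fault()/ptwr_do_page_fault() matches above. */
    static int sketch_ro_fault(struct cpu_user_regs *regs, void *caller_data)
    {
        struct x86_emulate_ctxt ctxt = {
            .regs = regs,
            .lma  = !is_pv_32bit_vcpu(current),
        };
        int rc;

        ctxt.data = caller_data;        /* e.g. &ptwr_ctxt or &mmio_ro_ctxt */
        rc = x86_emulate(&ctxt, &ptwr_emulate_ops);

        /* A fault raised by the ops callbacks comes back as a pending event. */
        if ( rc == X86EMUL_EXCEPTION && ctxt.event.vector == TRAP_page_fault )
            pv_inject_event(&ctxt.event);

        return rc;
    }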
emul-gate-op.c
  103  container_of(ctxt, struct gate_op_ctxt, ctxt);  in read_mem()
  126  sel = ctxt->regs->ss;  in read_mem()
  170  struct gate_op_ctxt ctxt = { .ctxt.regs = regs, .insn_fetch = true };  in pv_emulate_gate_op() local
  196  if ( !pv_emul_read_descriptor(regs->cs, v, &ctxt.cs.base, &ctxt.cs.limit,  in pv_emulate_gate_op()
  206  ctxt.ctxt.addr_size = ar & _SEGMENT_DB ? 32 : 16;  in pv_emulate_gate_op()
  208  state = x86_decode_insn(&ctxt.ctxt, read_mem);  in pv_emulate_gate_op()
  213  pv_inject_event(&ctxt.ctxt.event);  in pv_emulate_gate_op()
  219  switch ( ctxt.ctxt.opcode )  in pv_emulate_gate_op()
  244  &opnd_sel, sizeof(opnd_sel), &ctxt.ctxt);  in pv_emulate_gate_op()
  250  insn_len = x86_insn_length(state, &ctxt.ctxt);  in pv_emulate_gate_op()
  [all …]

/xen/xen/arch/x86/cpu/
vpmu_amd.c
  218  struct xen_pmu_amd_ctxt *ctxt;  in amd_vpmu_load() local
  226  ctxt = vpmu->context;  in amd_vpmu_load()
  242  ctxt = vpmu->context;  in amd_vpmu_load()
  317  ctxt = vpmu->context;  in amd_vpmu_save()
  502  struct xen_pmu_amd_ctxt *ctxt;  in svm_vpmu_initialise() local
  511  ctxt = xmalloc_bytes(sizeof(*ctxt) + regs_sz);  in svm_vpmu_initialise()
  512  if ( !ctxt )  in svm_vpmu_initialise()
  520  ctxt->counters = sizeof(*ctxt);  in svm_vpmu_initialise()
  521  ctxt->ctrls = ctxt->counters + sizeof(uint64_t) * num_counters;  in svm_vpmu_initialise()
  522  amd_vpmu_init_regs(ctxt);  in svm_vpmu_initialise()
  [all …]

/xen/xen/arch/x86/
x86_emulate.c
  57   struct x86_emulate_ctxt *ctxt)  in x86emul_read_xcr() argument
  70   x86_emul_hw_exception(TRAP_gp_fault, 0, ctxt);  in x86emul_read_xcr()
  81   struct x86_emulate_ctxt *ctxt)  in x86emul_write_xcr() argument
  90   if ( ctxt )  in x86emul_write_xcr()
  91   x86_emul_hw_exception(TRAP_gp_fault, 0, ctxt);  in x86emul_write_xcr()
  104  struct x86_emulate_ctxt *ctxt)  in x86emul_read_dr() argument
  137  if ( ctxt )  in x86emul_read_dr()
  147  struct x86_emulate_ctxt *ctxt)  in x86emul_write_dr() argument
  160  x86_emul_hw_exception(TRAP_invalid_op, X86_EVENT_NO_EC, ctxt);  in x86emul_write_dr()
  164  x86_emul_hw_exception(TRAP_gp_fault, 0, ctxt);  in x86emul_write_dr()
  [all …]

/xen/tools/libxc/
xc_dom_arm.c
  119  memset(ctxt, 0, sizeof(*ctxt));  in vcpu_arm32()
  136  ctxt->ttbr0 = 0;  in vcpu_arm32()
  137  ctxt->ttbr1 = 0;  in vcpu_arm32()
  145  ctxt->user_regs.cpsr, ctxt->user_regs.pc32);  in vcpu_arm32()
  163  memset(ctxt, 0, sizeof(*ctxt));  in vcpu_arm64()
  170  ctxt->user_regs.x1 = 0;  in vcpu_arm64()
  171  ctxt->user_regs.x2 = 0;  in vcpu_arm64()
  172  ctxt->user_regs.x3 = 0;  in vcpu_arm64()
  178  ctxt->ttbr0 = 0;  in vcpu_arm64()
  179  ctxt->ttbr1 = 0;  in vcpu_arm64()
  [all …]

/xen/xen/arch/arm/
vpsci.c
  29   struct vcpu_guest_context *ctxt;  in do_common_cpu_on() local
  51   memset(ctxt, 0, sizeof(*ctxt));  in do_common_cpu_on()
  53   ctxt->sctlr = SCTLR_GUEST_INIT;  in do_common_cpu_on()
  54   ctxt->ttbr0 = 0;  in do_common_cpu_on()
  55   ctxt->ttbr1 = 0;  in do_common_cpu_on()
  69   ctxt->user_regs.pc64 &= ~(u64)1;  in do_common_cpu_on()
  72   ctxt->user_regs.r0_usr = context_id;  in do_common_cpu_on()
  78   ctxt->user_regs.x0 = context_id;  in do_common_cpu_on()
  81   ctxt->flags = VGCF_online;  in do_common_cpu_on()
  84   rc = arch_set_info_guest(v, ctxt);  in do_common_cpu_on()
  [all …]
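
The do_common_cpu_on() matches outline how PSCI CPU_ON brings a secondary vCPU online: zero the guest context, give it a known-good SCTLR and empty translation-table bases, load the requested entry point and context ID into pc/r0 (or pc64/x0 for a 64-bit vCPU), mark it online, and hand everything to arch_set_info_guest(). A compressed sketch of that sequence, assuming Xen/ARM's internal types as used above; allocation of ctxt, locking, the Thumb entry-point handling and the PSCI return-code mapping of the real function are left out.

    /* Compressed from the do_common_cpu_on() matches above; not the exact code. */
    static int sketch_cpu_on(struct vcpu *v, struct vcpu_guest_context *ctxt,
                             register_t entry, register_t context_id,
                             bool is_64bit)
    {
        memset(ctxt, 0, sizeof(*ctxt));

        ctxt->sctlr = SCTLR_GUEST_INIT;     /* sane reset state, MMU off */
        ctxt->ttbr0 = 0;
        ctxt->ttbr1 = 0;

        if ( is_64bit )
        {
            ctxt->user_regs.pc64 = entry & ~(u64)1;   /* bit 0 must be clear */
            ctxt->user_regs.x0 = context_id;
        }
        else
        {
            ctxt->user_regs.pc32 = entry;
            ctxt->user_regs.r0_usr = context_id;
        }

        ctxt->flags = VGCF_online;

        return arch_set_info_guest(v, ctxt);
    }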
traps.c
  814  ctxt->ifsr32_el2,  in show_registers_32()
  816  ctxt->esr_el1  in show_registers_32()
  818  ctxt->ifar, ctxt->ifsr, ctxt->dfar, ctxt->dfsr  in show_registers_32()
  935  struct reg_ctxt ctxt;  in show_registers() local
  958  struct reg_ctxt ctxt;  in vcpu_show_registers() local
  964  ctxt.dfar = v->arch.dfar;  in vcpu_show_registers()
  965  ctxt.ifar = v->arch.ifar;  in vcpu_show_registers()
  966  ctxt.dfsr = v->arch.dfsr;  in vcpu_show_registers()
  967  ctxt.ifsr = v->arch.ifsr;  in vcpu_show_registers()
  969  ctxt.far = v->arch.far;  in vcpu_show_registers()
  [all …]

/xen/xen/arch/x86/hvm/
emulate.c
  1286  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_read()
  1304  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_insn_fetch()
  1351  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_write()
  1400  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_rmw()
  1454  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_blk()
  1598  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_cmpxchg()
  1687  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_validate()
  1704  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_rep_ins()
  1782  container_of(ctxt, struct hvm_emulate_ctxt, ctxt);  in hvmemul_rep_outs()
  2755  ctxt.ctxt.data = &mmio_ro_ctxt;  in hvm_emulate_one_mmio()
  [all …]
hvm.c
  928   memcpy(ctxt.fpu_regs, v->arch.fpu_ctxt, sizeof(ctxt.fpu_regs));  in hvm_save_cpu_ctxt()
  1065  if ( ctxt.msr_tsc_aux != (uint32_t)ctxt.msr_tsc_aux )  in hvm_load_cpu_ctxt()
  1297  err = validate_xstate(d, ctxt->xcr0, ctxt->xcr0_accum,  in hvm_load_cpu_xsave_states()
  1304  d->domain_id, vcpuid, ctxt->xfeature_mask, ctxt->xcr0_accum,  in hvm_load_cpu_xsave_states()
  1305  ctxt->xcr0, ctxt->save_area.xsave_hdr.xstate_bv, err);  in hvm_load_cpu_xsave_states()
  1383  ctxt->count = 0;  in hvm_save_cpu_msrs()
  1408  ctxt->msr[ctxt->count].index = msrs_to_send[i];  in hvm_save_cpu_msrs()
  1409  ctxt->msr[ctxt->count++].val = val;  in hvm_save_cpu_msrs()
  1417  if ( ctxt->count )  in hvm_save_cpu_msrs()
  1499  rc = guest_wrmsr(v, ctxt->msr[i].index, ctxt->msr[i].val);  in hvm_load_cpu_msrs()
  [all …]
save.c
  139  hvm_domain_context_t ctxt = { };  in hvm_save_one() local
  155  ctxt.size = hvm_sr_handlers[typecode].size;  in hvm_save_one()
  156  ctxt.data = xmalloc_bytes(ctxt.size);  in hvm_save_one()
  157  if ( !ctxt.data )  in hvm_save_one()
  165  if ( (rv = hvm_sr_handlers[typecode].save(v, &ctxt)) != 0 )  in hvm_save_one()
  169  -ENODATA : -ENOENT), ctxt.cur >= sizeof(*desc) )  in hvm_save_one()
  173  for ( off = 0; off <= (ctxt.cur - sizeof(*desc)); off += desc->length )  in hvm_save_one()
  175  desc = (void *)(ctxt.data + off);  in hvm_save_one()
  178  if ( ctxt.cur < desc->length ||  in hvm_save_one()
  179  off > ctxt.cur - desc->length )  in hvm_save_one()
  [all …]

/xen/xen/common/compat/
domain.c
  54  struct vcpu_hvm_context ctxt;  in compat_vcpu_op() local
  56  if ( copy_from_guest(&ctxt, arg, 1) )  in compat_vcpu_op()
  60  rc = v->is_initialised ? -EEXIST : arch_set_info_hvm_guest(v, &ctxt);  in compat_vcpu_op()
  65  struct compat_vcpu_guest_context *ctxt;  in compat_vcpu_op() local
  67  if ( (ctxt = xmalloc(struct compat_vcpu_guest_context)) == NULL )  in compat_vcpu_op()
  70  if ( copy_from_guest(ctxt, arg, 1) )  in compat_vcpu_op()
  72  xfree(ctxt);  in compat_vcpu_op()
  77  rc = v->is_initialised ? -EEXIST : arch_set_info_guest(v, ctxt);  in compat_vcpu_op()
  80  xfree(ctxt);  in compat_vcpu_op()
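
Both VCPUOP_initialise branches above follow the same defensive pattern: copy the guest-supplied context out of guest memory first, refuse to re-initialise an already-initialised vCPU, and free any temporary buffer on every exit path. The fragment below reassembles the flattened heap-allocated branch into readable form; the surrounding switch statement, domain locking and the vcpu_hvm_context branch are omitted, and the early-return error handling is a simplification of the real control flow.

    /* The heap-allocated VCPUOP_initialise branch above, reassembled. */
    struct compat_vcpu_guest_context *ctxt;
    int rc;

    if ( (ctxt = xmalloc(struct compat_vcpu_guest_context)) == NULL )
        return -ENOMEM;

    if ( copy_from_guest(ctxt, arg, 1) )
    {
        xfree(ctxt);
        return -EFAULT;
    }

    /* A vCPU may only be initialised once. */
    rc = v->is_initialised ? -EEXIST : arch_set_info_guest(v, ctxt);

    xfree(ctxt);
    return rc;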

/xen/xen/arch/x86/hvm/viridian/
private.h
  25  struct hvm_viridian_vcpu_context *ctxt);
  27  struct vcpu *v, const struct hvm_viridian_vcpu_context *ctxt);
  30  const struct domain *d, struct hvm_viridian_domain_context *ctxt);
  32  struct domain *d, const struct hvm_viridian_domain_context *ctxt);
  46  const struct vcpu *v, struct hvm_viridian_vcpu_context *ctxt);
  48  struct vcpu *v, const struct hvm_viridian_vcpu_context *ctxt);
  51  const struct domain *d, struct hvm_viridian_domain_context *ctxt);
  53  struct domain *d, const struct hvm_viridian_domain_context *ctxt);
viridian.c
  516  uint64_t vcpu_mask = *(uint64_t *)ctxt;  in need_flush()
  781  viridian_time_save_domain_ctxt(d, &ctxt);  in viridian_save_domain_ctxt()
  791  struct hvm_viridian_domain_context ctxt;  in viridian_load_domain_ctxt() local
  797  vd->guest_os_id.raw = ctxt.guest_os_id;  in viridian_load_domain_ctxt()
  800  viridian_time_load_domain_ctxt(d, &ctxt);  in viridian_load_domain_ctxt()
  815  viridian_time_save_vcpu_ctxt(v, &ctxt);  in viridian_save_vcpu_ctxt()
  816  viridian_synic_save_vcpu_ctxt(v, &ctxt);  in viridian_save_vcpu_ctxt()
  826  struct hvm_viridian_vcpu_context ctxt;  in viridian_load_vcpu_ctxt() local
  838  if ( memcmp(&ctxt._pad, zero_page, sizeof(ctxt._pad)) )  in viridian_load_vcpu_ctxt()
  841  viridian_synic_load_vcpu_ctxt(v, &ctxt);  in viridian_load_vcpu_ctxt()
  [all …]
synic.c
  401  BUILD_BUG_ON(ARRAY_SIZE(vv->sint) != ARRAY_SIZE(ctxt->sint_msr));  in viridian_synic_save_vcpu_ctxt()
  404  ctxt->sint_msr[i] = vv->sint[i].as_uint64;  in viridian_synic_save_vcpu_ctxt()
  406  ctxt->simp_msr = vv->simp.msr.raw;  in viridian_synic_save_vcpu_ctxt()
  408  ctxt->apic_assist_pending = vv->apic_assist_pending;  in viridian_synic_save_vcpu_ctxt()
  409  ctxt->vp_assist_msr = vv->vp_assist.msr.raw;  in viridian_synic_save_vcpu_ctxt()
  413  struct vcpu *v, const struct hvm_viridian_vcpu_context *ctxt)  in viridian_synic_load_vcpu_ctxt() argument
  419  vv->vp_assist.msr.raw = ctxt->vp_assist_msr;  in viridian_synic_load_vcpu_ctxt()
  423  vv->apic_assist_pending = ctxt->apic_assist_pending;  in viridian_synic_load_vcpu_ctxt()
  425  vv->simp.msr.raw = ctxt->simp_msr;  in viridian_synic_load_vcpu_ctxt()
  433  vv->sint[i].as_uint64 = ctxt->sint_msr[i];  in viridian_synic_load_vcpu_ctxt()
  [all …]

/xen/tools/tests/x86_emulator/
test_x86_emulator.c
  546   struct x86_emulate_ctxt *ctxt)  in read() argument
  601   struct x86_emulate_ctxt *ctxt)  in fetch() argument
  871   struct x86_emulate_ctxt ctxt;  in main() local
  886   ctxt.regs = &regs;  in main()
  887   ctxt.force_writeback = 0;  in main()
  888   ctxt.cpuid = &cp;  in main()
  4446  ctxt.lma = false;  in main()
  4447  ctxt.sp_size = ctxt.addr_size = 32;  in main()
  4474  ctxt.lma = true;  in main()
  4475  ctxt.sp_size = ctxt.addr_size = 64;  in main()
  [all …]
predicates.c
  548  s = x86_decode_insn(ctxt, fetch);  in do_test()
  550  if ( x86_insn_length(s, ctxt) != len )  in do_test()
  577  s = x86_decode_insn(ctxt, fetch);  in do_test()
  579  if ( x86_insn_length(s, ctxt) != len )  in do_test()
  585  if ( x86_insn_is_mem_access(s, ctxt) ||  in do_test()
  586  x86_insn_is_mem_write(s, ctxt) )  in do_test()
  607  ctxt->regs->eip = (unsigned long)instr;  in predicates_test()
  613  ctxt->addr_size = 32 << m;  in predicates_test()
  614  ctxt->sp_size = 32 << m;  in predicates_test()
  615  ctxt->lma = ctxt->sp_size == 64;  in predicates_test()
  [all …]
x86-emulate.c
  139  struct x86_emulate_ctxt *ctxt)  in emul_test_cpuid() argument
  177  struct x86_emulate_ctxt *ctxt)  in emul_test_read_cr() argument
  198  struct x86_emulate_ctxt *ctxt)  in emul_test_read_xcr() argument
  214  x86_emul_hw_exception(13 /* #GP */, 0, ctxt);  in emul_test_read_xcr()
  226  struct x86_emulate_ctxt *ctxt)  in emul_test_get_fpu() argument
  252  struct x86_emulate_ctxt *ctxt,  in emul_test_put_fpu() argument

/xen/xen/arch/x86/mm/shadow/
hvm.c
  102  0, &sh_ctxt->ctxt);  in hvm_translate_virtual_addr()
  156  struct x86_emulate_ctxt *ctxt)  in hvm_emulate_read() argument
  161  container_of(ctxt, struct sh_emulate_ctxt, ctxt));  in hvm_emulate_read()
  172  container_of(ctxt, struct sh_emulate_ctxt, ctxt);  in hvm_emulate_insn_fetch()
  192  struct x86_emulate_ctxt *ctxt)  in hvm_emulate_write() argument
  195  container_of(ctxt, struct sh_emulate_ctxt, ctxt);  in hvm_emulate_write()
  247  struct x86_emulate_ctxt *ctxt)  in hvm_emulate_cmpxchg() argument
  250  container_of(ctxt, struct sh_emulate_ctxt, ctxt);  in hvm_emulate_cmpxchg()
  324  sh_ctxt->ctxt.regs = regs;  in shadow_init_emulation()
  332  if ( sh_ctxt->ctxt.lma && creg->l )  in shadow_init_emulation()
  [all …]