
Searched refs:vm (Results 1 – 25 of 430) sorted by relevance


/linux/drivers/virtio/
virtio_mem.c
812 return start < vm->addr + vm->region_size && vm->addr < start + size; in virtio_mem_overlaps_range()
822 return start >= vm->addr && start + size <= vm->addr + vm->region_size; in virtio_mem_contains_range()
1294 sg_init_one(&sg_req, &vm->req, sizeof(vm->req)); in virtio_mem_send_request()
1310 return virtio16_to_cpu(vm->vdev, vm->resp.type); in virtio_mem_send_request()
2338 if (vm->requested_size > vm->plugged_size) { in virtio_mem_run_wq()
2339 diff = vm->requested_size - vm->plugged_size; in virtio_mem_run_wq()
2461 vm->sbm.next_mb_id = vm->sbm.first_mb_id; in virtio_mem_init_hotplug()
2483 vm->bbm.next_bb_id = vm->bbm.first_bb_id; in virtio_mem_init_hotplug()
2653 dev_info(&vm->vdev->dev, "nid: %d", vm->nid); in virtio_mem_init()
2676 vm->parent_resource = __request_mem_region(vm->addr, vm->region_size, in virtio_mem_create_resource()
[all …]
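
The hits at lines 812 and 822 are the driver's range checks against its managed region; a standalone sketch of the same half-open-interval logic follows (the struct and field names only mirror the snippets above, this is an illustration, not the driver code):

```c
#include <stdbool.h>
#include <stdint.h>

/* Illustrative stand-in for the fields used by the snippets above. */
struct region {
	uint64_t addr;		/* start of the managed region */
	uint64_t region_size;	/* length of the managed region */
};

/* True if [start, start + size) intersects the region at all. */
static bool overlaps_range(const struct region *r, uint64_t start, uint64_t size)
{
	return start < r->addr + r->region_size && r->addr < start + size;
}

/* True if [start, start + size) lies entirely inside the region. */
static bool contains_range(const struct region *r, uint64_t start, uint64_t size)
{
	return start >= r->addr && start + size <= r->addr + r->region_size;
}
```

The overlap test is the usual "each range starts before the other ends" form; containment additionally requires both endpoints to fall inside the region.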
/linux/tools/testing/selftests/kvm/lib/aarch64/
processor.c
81 page_align(vm, ptrs_per_pgd(vm) * 8) / vm->page_size, in virt_pgd_alloc()
106 paddr, vm->max_gfn, vm->page_size); in _virt_pg_map()
108 ptep = addr_gpa2hva(vm, vm->pgd) + pgd_index(vm, vaddr) * 8; in _virt_pg_map()
114 ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pud_index(vm, vaddr) * 8; in _virt_pg_map()
119 ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pmd_index(vm, vaddr) * 8; in _virt_pg_map()
124 ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pte_index(vm, vaddr) * 8; in _virt_pg_map()
148 ptep = addr_gpa2hva(vm, vm->pgd) + pgd_index(vm, gva) * 8; in addr_gva2gpa()
154 ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pud_index(vm, gva) * 8; in addr_gva2gpa()
159 ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pmd_index(vm, gva) * 8; in addr_gva2gpa()
164 ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pte_index(vm, gva) * 8; in addr_gva2gpa()
[all …]
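
The addr_gva2gpa() hits above walk a four-level table in software, reading the descriptor at each level and following its output address down to the next table. Below is a minimal generic sketch of that walk, assuming 4K pages and 9 index bits per level; table_index() and read_desc() are simplified stand-ins (read_desc() plays the role of addr_gpa2hva() plus a dereference) rather than the selftest helpers:

```c
#include <stdint.h>

#define LEVELS		4
#define BITS_PER_LEVEL	9	/* 512 descriptors per 4K table */
#define PAGE_SHIFT	12

/* Index into the table at a given level for a virtual address. */
static unsigned int table_index(uint64_t va, int level)
{
	unsigned int shift = PAGE_SHIFT + BITS_PER_LEVEL * (LEVELS - 1 - level);

	return (va >> shift) & ((1u << BITS_PER_LEVEL) - 1);
}

/*
 * Walk the table rooted at root_pa and return the physical address that
 * va maps to. read_desc() returns the 64-bit descriptor stored at a
 * given guest-physical address.
 */
uint64_t walk(uint64_t root_pa, uint64_t va, uint64_t (*read_desc)(uint64_t pa))
{
	uint64_t table = root_pa;
	int level;

	for (level = 0; level < LEVELS; level++) {
		uint64_t desc = read_desc(table + table_index(va, level) * 8);

		/* Simplified: treat the low 12 bits as attribute/flag bits. */
		table = desc & ~((1ull << PAGE_SHIFT) - 1);
	}

	return table | (va & ((1ull << PAGE_SHIFT) - 1));
}
```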
/linux/tools/testing/selftests/kvm/lib/
kvm_util.c
158 vm->fd = ioctl(vm->kvm_fd, KVM_CREATE_VM, vm->type); in vm_open()
224 vm = calloc(1, sizeof(*vm)); in vm_create()
268 TEST_ASSERT(vm->va_bits == 48 || vm->va_bits == 57, in vm_create()
291 vm->type = KVM_VM_TYPE_ARM_IPA_SIZE(vm->pa_bits); in vm_create()
299 0, (1ULL << (vm->va_bits - 1)) >> vm->page_shift); in vm_create()
302 (1ULL << (vm->va_bits - 1)) >> vm->page_shift); in vm_create()
305 vm->max_gfn = vm_compute_max_gfn(vm); in vm_create()
313 return vm; in vm_create()
380 return vm; in vm_create_with_vcpus()
707 amt = vm->page_size - (ptr1 % vm->page_size); in kvm_memcmp_hva_gva()
[all …]
/linux/tools/testing/selftests/kvm/lib/s390x/
processor.c
19 vm->page_size); in virt_pgd_alloc()
21 if (vm->pgd_created) in virt_pgd_alloc()
26 memset(addr_gpa2hva(vm, paddr), 0xff, PAGES_PER_REGION * vm->page_size); in virt_pgd_alloc()
28 vm->pgd = paddr; in virt_pgd_alloc()
29 vm->pgd_created = true; in virt_pgd_alloc()
43 memset(addr_gpa2hva(vm, taddr), 0xff, PAGES_PER_REGION * vm->page_size); in virt_alloc_region()
67 TEST_ASSERT((gpa >> vm->page_shift) <= vm->max_gfn, in virt_pg_map()
70 gva, vm->max_gfn, vm->page_size); in virt_pg_map()
73 entry = addr_gpa2hva(vm, vm->pgd); in virt_pg_map()
97 entry = addr_gpa2hva(vm, vm->pgd); in addr_gva2gpa()
[all …]
/linux/drivers/gpu/drm/lima/
lima_vm.c
18 struct lima_vm *vm; member
82 if (bo_va->vm == vm) { in lima_vm_bo_find()
118 bo_va->vm = vm; in lima_vm_bo_add()
204 vm = kzalloc(sizeof(*vm), GFP_KERNEL); in lima_vm_create()
205 if (!vm) in lima_vm_create()
208 vm->dev = dev; in lima_vm_create()
226 return vm; in lima_vm_create()
231 kfree(vm); in lima_vm_create()
245 vm->bts[i].cpu, vm->bts[i].dma); in lima_vm_release()
249 dma_free_wc(vm->dev->dev, LIMA_PAGE_SIZE, vm->pd.cpu, vm->pd.dma); in lima_vm_release()
[all …]
/linux/tools/testing/selftests/kvm/include/
kvm_util.h
129 void vm_create_irqchip(struct kvm_vm *vm);
150 vm_vaddr_t vm_vaddr_alloc_page(struct kvm_vm *vm);
177 void vcpu_run(struct kvm_vm *vm, uint32_t vcpuid);
255 void virt_pgd_alloc(struct kvm_vm *vm);
325 uint64_t vm_get_max_gfn(struct kvm_vm *vm);
326 int vm_get_fd(struct kvm_vm *vm);
352 #define sync_global_to_guest(vm, g) ({ \ argument
357 #define sync_global_from_guest(vm, g) ({ \ argument
380 void ucall_init(struct kvm_vm *vm, void *arg);
381 void ucall_uninit(struct kvm_vm *vm);
[all …]
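
The kvm_util.h hits list the core selftest helpers; a minimal sketch of how a test typically strings them together is shown below. VCPU_ID and guest_code are placeholders, and vm_create_default(), GUEST_DONE(), and kvm_vm_free() are assumed from the wider selftest library (kvm_vm_free() also appears in the x86_64 test hits further down):

```c
#include "kvm_util.h"		/* the selftest helpers listed above */

#define VCPU_ID 0		/* placeholder vCPU id for this sketch */

static void guest_code(void)
{
	GUEST_DONE();		/* signal the host that the guest is finished */
}

int main(void)
{
	struct kvm_vm *vm;

	/* Create a VM with default memory and one vCPU running guest_code. */
	vm = vm_create_default(VCPU_ID, 0, guest_code);
	ucall_init(vm, NULL);	/* set up the guest-to-host ucall channel */

	vcpu_run(vm, VCPU_ID);	/* run until the guest's GUEST_DONE() exit */

	ucall_uninit(vm);
	kvm_vm_free(vm);
	return 0;
}
```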
/linux/drivers/gpu/drm/i915/gt/
intel_gtt.c
100 if (!atomic_dec_and_mutex_lock(&vm->open, &vm->mutex)) in __i915_vm_close()
125 if (vm->scratch[0]->base.resv == &vm->_resv) { in i915_vm_lock_objects()
154 kfree(vm); in i915_vm_resv_release()
162 vm->cleanup(vm); in __i915_vm_release()
165 i915_vm_resv_put(vm); in __i915_vm_release()
176 queue_work(vm->i915->wq, &vm->release_work); in i915_vm_release()
181 kref_init(&vm->ref); in i915_address_space_init()
202 i915_gem_shrinker_taints_mutex(vm->i915, &vm->mutex); in i915_address_space_init()
219 drm_mm_init(&vm->mm, 0, vm->total); in i915_address_space_init()
302 obj = vm->alloc_pt_dma(vm, size); in setup_scratch_page()
[all …]
intel_ggtt.c
63 ggtt->vm.cleanup(&ggtt->vm); in ggtt_init_hw()
142 ggtt->vm.clear_range(&ggtt->vm, 0, ggtt->vm.total); in i915_ggtt_suspend()
479 vm->clear_range(vm, vma->node.start, vma->size); in ggtt_unbind_vma()
596 ggtt->vm.clear_range(&ggtt->vm, hole_start, in init_ggtt()
601 ggtt->vm.clear_range(&ggtt->vm, ggtt->vm.total - PAGE_SIZE, PAGE_SIZE); in init_ggtt()
624 ppgtt_bind_vma(&i915_vm_to_ggtt(vm)->alias->vm, in aliasing_gtt_bind_vma()
635 vm->clear_range(vm, vma->node.start, vma->size); in aliasing_gtt_unbind_vma()
672 ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, ggtt->vm.total); in init_aliasing_ppgtt()
744 ggtt->vm.cleanup(&ggtt->vm); in ggtt_cleanup_hw()
872 ggtt->vm.pte_encode(px_dma(ggtt->vm.scratch[0]), in ggtt_probe_common()
[all …]
intel_gtt.h
361 #define i915_is_ggtt(vm) ((vm)->is_ggtt) argument
362 #define i915_is_dpt(vm) ((vm)->is_dpt) argument
363 #define i915_is_ggtt_or_dpt(vm) (i915_is_ggtt(vm) || i915_is_dpt(vm)) argument
383 return i915_is_ggtt(vm) && vm->mm.color_adjust; in i915_vm_has_cache_coloring()
391 return container_of(vm, struct i915_ggtt, vm); in i915_vm_to_ggtt()
399 return container_of(vm, struct i915_ppgtt, vm); in i915_vm_to_ppgtt()
406 return vm; in i915_vm_get()
418 return &vm->_resv; in i915_vm_resv_get()
464 i915_vm_put(vm); in i915_vm_close()
573 #define free_pt(vm, px) free_px(vm, px, 0) argument
[all …]
gen8_ppgtt.c
199 __gen8_ppgtt_cleanup(vm, ppgtt->pd, gen8_pd_top_count(vm), vm->top); in gen8_ppgtt_cleanup()
272 __gen8_ppgtt_clear(vm, i915_vm_to_ppgtt(vm)->pd, in gen8_ppgtt_clear()
357 __gen8_ppgtt_alloc(vm, stash, i915_vm_to_ppgtt(vm)->pd, in gen8_ppgtt_alloc()
404 __gen8_ppgtt_foreach(vm, i915_vm_to_ppgtt(vm)->pd, in gen8_ppgtt_foreach()
633 if (vm->has_read_only && vm->gt->vm && !i915_is_ggtt(vm->gt->vm)) { in gen8_init_scratch()
634 struct i915_address_space *clone = vm->gt->vm; in gen8_init_scratch()
660 obj = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K); in gen8_init_scratch()
686 struct i915_address_space *vm = &ppgtt->vm; in gen8_preallocate_top_level_pdp() local
729 pd->pt.base = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K); in gen8_alloc_top_pd()
767 ppgtt->vm.top = i915_vm_is_4lvl(&ppgtt->vm) ? 3 : 2; in gen8_ppgtt_create()
[all …]
/linux/drivers/gpu/drm/i915/display/
intel_dpt.c
13 struct i915_address_space vm; member
20 #define i915_is_dpt(vm) ((vm)->is_dpt) argument
26 GEM_BUG_ON(!i915_is_dpt(vm)); in i915_vm_to_dpt()
27 return container_of(vm, struct i915_dpt, vm); in i915_vm_to_dpt()
93 vma->vm->insert_entries(vma->vm, vma, cache_level, pte_flags); in dpt_bind_vma()
107 vm->clear_range(vm, vma->node.start, vma->size); in dpt_unbind_vma()
207 vm = &dpt->vm; in intel_dpt_create()
209 vm->gt = &i915->gt; in intel_dpt_create()
210 vm->i915 = i915; in intel_dpt_create()
213 vm->is_dpt = true; in intel_dpt_create()
[all …]
/linux/drivers/virt/acrn/
vm.c
40 vm->vmid = vm_param->vmid; in acrn_vm_create()
53 acrn_ioeventfd_init(vm); in acrn_vm_create()
54 acrn_irqfd_init(vm); in acrn_vm_create()
56 return vm; in acrn_vm_create()
77 list_del_init(&vm->list); in acrn_vm_destroy()
80 acrn_ioeventfd_deinit(vm); in acrn_vm_destroy()
81 acrn_irqfd_deinit(vm); in acrn_vm_destroy()
82 acrn_ioreq_deinit(vm); in acrn_vm_destroy()
84 if (vm->monitor_page) { in acrn_vm_destroy()
86 vm->monitor_page = NULL; in acrn_vm_destroy()
[all …]
irqfd.c
33 struct acrn_vm *vm; member
44 struct acrn_vm *vm = irqfd->vm; in acrn_irqfd_inject() local
66 struct acrn_vm *vm; in hsm_irqfd_shutdown_work() local
69 vm = irqfd->vm; in hsm_irqfd_shutdown_work()
70 mutex_lock(&vm->irqfds_lock); in hsm_irqfd_shutdown_work()
82 struct acrn_vm *vm; in hsm_irqfd_wakeup() local
85 vm = irqfd->vm; in hsm_irqfd_wakeup()
123 irqfd->vm = vm; in acrn_irqfd_assign()
215 INIT_LIST_HEAD(&vm->irqfds); in acrn_irqfd_init()
217 vm->irqfd_wq = alloc_workqueue("acrn_irqfd-%u", 0, 0, vm->vmid); in acrn_irqfd_init()
[all …]
ioreq.c
98 if (vm->default_client) in acrn_ioreq_request_default_complete()
437 client->vm = vm; in acrn_ioreq_client_create()
473 struct acrn_vm *vm = client->vm; in acrn_ioreq_client_destroy() local
551 struct acrn_vm *vm; in ioreq_dispatcher() local
555 if (!vm->ioreq_buf) in ioreq_dispatcher()
607 if (vm->ioreq_buf) in acrn_ioreq_init()
623 vm->ioreq_page = page; in acrn_ioreq_init()
629 vm->ioreq_buf = NULL; in acrn_ioreq_init()
650 if (vm->default_client) in acrn_ioreq_deinit()
653 if (vm->ioreq_buf && vm->ioreq_page) { in acrn_ioreq_deinit()
[all …]
ioeventfd.c
121 mutex_lock(&vm->ioeventfds_lock); in acrn_ioeventfd_assign()
135 mutex_unlock(&vm->ioeventfds_lock); in acrn_ioeventfd_assign()
140 mutex_unlock(&vm->ioeventfds_lock); in acrn_ioeventfd_assign()
157 mutex_lock(&vm->ioeventfds_lock); in acrn_ioeventfd_deassign()
164 acrn_ioeventfd_shutdown(vm, p); in acrn_ioeventfd_deassign()
248 mutex_init(&vm->ioeventfds_lock); in acrn_ioeventfd_init()
249 INIT_LIST_HEAD(&vm->ioeventfds); in acrn_ioeventfd_init()
251 vm->ioeventfd_client = acrn_ioreq_client_create(vm, in acrn_ioeventfd_init()
254 if (!vm->ioeventfd_client) { in acrn_ioeventfd_init()
269 mutex_lock(&vm->ioeventfds_lock); in acrn_ioeventfd_deinit()
[all …]
/linux/drivers/gpu/drm/i915/selftests/
mock_gtt.c
73 ppgtt->vm.gt = &i915->gt; in mock_ppgtt()
74 ppgtt->vm.i915 = i915; in mock_ppgtt()
76 ppgtt->vm.dma = i915->drm.dev; in mock_ppgtt()
85 ppgtt->vm.cleanup = mock_cleanup; in mock_ppgtt()
112 ggtt->vm.gt = &i915->gt; in mock_init_ggtt()
113 ggtt->vm.i915 = i915; in mock_init_ggtt()
114 ggtt->vm.is_ggtt = true; in mock_init_ggtt()
118 ggtt->vm.total = 4096 * PAGE_SIZE; in mock_init_ggtt()
120 ggtt->vm.alloc_pt_dma = alloc_pt_dma; in mock_init_ggtt()
125 ggtt->vm.cleanup = mock_cleanup; in mock_init_ggtt()
[all …]
/linux/sound/pci/ctxfi/
ctvmem.c
36 if (size > vm->size) { in get_vm_block()
42 mutex_lock(&vm->lock); in get_vm_block()
54 vm->size -= size; in get_vm_block()
68 vm->size -= size; in get_vm_block()
171 struct ct_vm *vm; in ct_vm_create() local
177 vm = kzalloc(sizeof(*vm), GFP_KERNEL); in ct_vm_create()
178 if (!vm) in ct_vm_create()
193 ct_vm_destroy(vm); in ct_vm_create()
209 *rvm = vm; in ct_vm_create()
239 vm->size = 0; in ct_vm_destroy()
[all …]
/linux/tools/testing/selftests/kvm/x86_64/
userspace_msr_exit_test.c
531 run_guest(vm); in run_guest_then_process_rdmsr()
537 run_guest(vm); in run_guest_then_process_wrmsr()
543 run_guest(vm); in run_guest_then_process_ucall()
549 run_guest(vm); in run_guest_then_process_ucall_done()
558 struct kvm_vm *vm; in test_msr_filter_allow() local
592 run_guest(vm); in test_msr_filter_allow()
616 kvm_vm_free(vm); in test_msr_filter_allow()
683 struct kvm_vm *vm; in test_msr_filter_deny() local
726 kvm_vm_free(vm); in test_msr_filter_deny()
734 struct kvm_vm *vm; in test_msr_permission_bitmap() local
[all …]
set_boot_cpu_id.c
57 vcpu_run(vm, vcpuid); in run_vcpu()
85 struct kvm_vm *vm; in create_vm() local
94 vm_create_irqchip(vm); in create_vm()
96 return vm; in create_vm()
109 struct kvm_vm *vm; in run_vm_bsp() local
112 vm = create_vm(); in run_vm_bsp()
120 run_vcpu(vm, VCPU_ID0); in run_vm_bsp()
123 kvm_vm_free(vm); in run_vm_bsp()
128 struct kvm_vm *vm; in check_set_bsp_busy() local
131 vm = create_vm(); in check_set_bsp_busy()
[all …]
emulator_error_test.c
31 static void run_guest(struct kvm_vm *vm) in run_guest() argument
35 rc = _vcpu_run(vm, VCPU_ID); in run_guest()
96 vcpu_regs_get(vm, VCPU_ID, &regs); in process_exit_on_emulation_error()
116 do_guest_assert(vm, &uc); in check_for_guest_assert()
125 check_for_guest_assert(vm); in process_ucall_done()
151 do_guest_assert(vm, &uc); in process_ucall()
154 process_ucall_done(vm); in process_ucall()
171 struct kvm_vm *vm; in main() local
210 run_guest(vm); in main()
212 run_guest(vm); in main()
[all …]
vmx_set_nested_state_test.c
122 test_nested_state(vm, state); in test_vmx_nested_state()
141 vcpu_enable_evmcs(vm, VCPU_ID); in test_vmx_nested_state()
143 test_nested_state(vm, state); in test_vmx_nested_state()
204 test_nested_state(vm, state); in test_vmx_nested_state()
212 test_nested_state(vm, state); in test_vmx_nested_state()
236 test_nested_state(vm, state); in test_vmx_nested_state()
247 void disable_vmx(struct kvm_vm *vm) in disable_vmx() argument
265 struct kvm_vm *vm; in main() local
286 disable_vmx(vm); in main()
315 test_vmx_nested_state(vm); in main()
[all …]
/linux/tools/testing/selftests/kvm/lib/x86_64/
processor.c
184 vm->pgd = vm_alloc_page_table(vm); in virt_pgd_alloc()
252 paddr, vm->max_gfn, vm->page_size); in __virt_pg_map()
258 pml4e = virt_create_upper_pte(vm, vm->pgd >> vm->page_shift, in __virt_pg_map()
331 pml4e = addr_gpa2hva(vm, vm->pgd); in _vm_get_page_table_entry()
474 void *gdt = addr_gva2hva(vm, vm->gdt); in kvm_seg_fill_gdt_64bit()
523 if (vm) in kvm_seg_set_kernel_code_64bit()
574 pml4e = addr_gpa2hva(vm, vm->pgd); in addr_gva2gpa()
600 vm->gdt = vm_vaddr_alloc_page(vm); in kvm_setup_gdt()
610 vm->tss = vm_vaddr_alloc_page(vm); in kvm_setup_tss_64bit()
1298 vm->idt = vm_vaddr_alloc_page(vm); in vm_init_descriptor_tables()
[all …]
vmx.c
408 TEST_ASSERT((nested_paddr >> vm->page_shift) <= vm->max_gfn, in nested_pg_map()
411 paddr, vm->max_gfn, vm->page_size); in nested_pg_map()
416 TEST_ASSERT((paddr >> vm->page_shift) <= vm->max_gfn, in nested_pg_map()
419 paddr, vm->max_gfn, vm->page_size); in nested_pg_map()
429 pml4e[index[3]].address = vm_alloc_page_table(vm) >> vm->page_shift; in nested_pg_map()
437 pdpe = addr_gpa2hva(vm, pml4e[index[3]].address * vm->page_size); in nested_pg_map()
439 pdpe[index[2]].address = vm_alloc_page_table(vm) >> vm->page_shift; in nested_pg_map()
447 pde = addr_gpa2hva(vm, pdpe[index[2]].address * vm->page_size); in nested_pg_map()
449 pde[index[1]].address = vm_alloc_page_table(vm) >> vm->page_shift; in nested_pg_map()
457 pte = addr_gpa2hva(vm, pde[index[1]].address * vm->page_size); in nested_pg_map()
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm.c
269 struct amdgpu_vm *vm = vm_bo->vm; in amdgpu_vm_bo_evicted() local
365 base->vm = vm; in amdgpu_vm_bo_base_init()
663 struct amdgpu_vm *vm = bo_base->vm; in amdgpu_vm_del_from_lru_notify() local
857 params.vm = vm; in amdgpu_vm_clear_bo()
1295 if (base->vm != vm) in amdgpu_vm_bo_find()
1403 params.vm = vm; in amdgpu_vm_update_pdes()
1626 struct amdgpu_vm *vm = params->vm; in amdgpu_vm_update_ptes() local
1726 params.vm = vm; in amdgpu_vm_bo_update_mapping()
1882 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update() local
2763 struct amdgpu_vm *vm = bo_base->vm; in amdgpu_vm_bo_invalidate() local
[all …]
/linux/drivers/gpu/drm/radeon/
radeon_vm.c
300 if (bo_va->vm == vm) in radeon_vm_bo_find()
330 bo_va->vm = vm; in radeon_vm_bo_add()
339 mutex_lock(&vm->mutex); in radeon_vm_bo_add()
453 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_set_addr() local
478 mutex_lock(&vm->mutex); in radeon_vm_bo_set_addr()
508 tmp->vm = vm; in radeon_vm_bo_set_addr()
915 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_update() local
924 bo_va->bo, vm); in radeon_vm_bo_update()
1121 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_rmv() local
1180 vm->ib_bo_va = NULL; in radeon_vm_init()
[all …]

