
Searched refs:gvt (Results 1 – 25 of 36) sorted by relevance

/linux/drivers/gpu/drm/i915/gvt/
gvt.c
88 mutex_lock(&gvt->lock); in intel_gvt_test_and_emulate_vblank()
157 struct intel_gvt *gvt = fetch_and_zero(&i915->gvt); in intel_gvt_clean_device() local
175 kfree(i915->gvt); in intel_gvt_clean_device()
199 if (!gvt) in intel_gvt_init_device()
208 gvt->gt = &i915->gt; in intel_gvt_init_device()
209 i915->gvt = gvt; in intel_gvt_init_device()
284 kfree(gvt); in intel_gvt_init_device()
285 i915->gvt = NULL; in intel_gvt_init_device()
302 void *gvt; in intel_gvt_register_hypervisor() local
317 gvt = (void *)kdev_to_i915(intel_gvt_host.dev)->gvt; in intel_gvt_register_hypervisor()
[all …]
vgpu.c
131 if (!gvt->types) in intel_gvt_init_vgpu_types()
163 gvt->types[i].high_gm_size, gvt->types[i].fence, in intel_gvt_init_vgpu_types()
168 gvt->num_types = i; in intel_gvt_init_vgpu_types()
174 kfree(gvt->types); in intel_gvt_clean_vgpu_types()
202 gvt->types[i].avail_instance, gvt->types[i].low_gm_size, in intel_gvt_update_vgpu_types()
203 gvt->types[i].high_gm_size, gvt->types[i].fence); in intel_gvt_update_vgpu_types()
275 struct intel_gvt *gvt = vgpu->gvt; in intel_gvt_destroy_vgpu() local
284 mutex_lock(&gvt->lock); in intel_gvt_destroy_vgpu()
331 vgpu->gvt = gvt; in intel_gvt_create_idle_vgpu()
388 vgpu->gvt = gvt; in __intel_gvt_create_vgpu()
[all …]
sched_policy.c
68 struct intel_gvt *gvt; member
213 struct intel_gvt *gvt = sched_data->gvt; in tbs_sched_func() local
245 mutex_lock(&gvt->sched_lock); in intel_gvt_schedule()
280 &gvt->scheduler; in tbs_sched_init()
292 data->gvt = gvt; in tbs_sched_init()
302 &gvt->scheduler; in tbs_sched_clean()
330 struct intel_gvt *gvt = vgpu->gvt; in tbs_sched_clean_vgpu() local
384 mutex_lock(&gvt->sched_lock); in intel_gvt_init_sched_policy()
386 ret = gvt->scheduler.sched_ops->init(gvt); in intel_gvt_init_sched_policy()
395 gvt->scheduler.sched_ops->clean(gvt); in intel_gvt_clean_sched_policy()
[all …]
gvt.h
347 return i915->gvt; in to_gvt()
381 #define gvt_to_ggtt(gvt) ((gvt)->gt->ggtt) argument
384 #define gvt_aperture_sz(gvt) gvt_to_ggtt(gvt)->mappable_end argument
385 #define gvt_aperture_pa_base(gvt) gvt_to_ggtt(gvt)->gmadr.start argument
387 #define gvt_ggtt_gm_sz(gvt) gvt_to_ggtt(gvt)->vm.total argument
388 #define gvt_ggtt_sz(gvt) (gvt_to_ggtt(gvt)->vm.total >> PAGE_SHIFT << 3) argument
389 #define gvt_hidden_sz(gvt) (gvt_ggtt_gm_sz(gvt) - gvt_aperture_sz(gvt)) argument
392 #define gvt_aperture_gmadr_end(gvt) (gvt_aperture_gmadr_base(gvt) \ argument
395 #define gvt_hidden_gmadr_base(gvt) (gvt_aperture_gmadr_base(gvt) \ argument
397 #define gvt_hidden_gmadr_end(gvt) (gvt_hidden_gmadr_base(gvt) \ argument
[all …]
aperture_gm.c
43 struct intel_gvt *gvt = vgpu->gvt; in alloc_gm() local
44 struct intel_gt *gt = gvt->gt; in alloc_gm()
81 struct intel_gvt *gvt = vgpu->gvt; in alloc_vgpu_gm() local
82 struct intel_gt *gt = gvt->gt; in alloc_vgpu_gm()
109 struct intel_gvt *gvt = vgpu->gvt; in free_vgpu_gm() local
131 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_write_fence() local
167 struct intel_gvt *gvt = vgpu->gvt; in free_vgpu_fence() local
192 struct intel_gvt *gvt = vgpu->gvt; in alloc_vgpu_fence() local
234 struct intel_gvt *gvt = vgpu->gvt; in free_resource() local
244 struct intel_gvt *gvt = vgpu->gvt; in alloc_resource() local
[all …]
mmio.c
53 #define reg_is_mmio(gvt, reg) \ argument
56 #define reg_is_gtt(gvt, reg) \ argument
58 && reg < gvt->device_info.gtt_start_offset + gvt_ggtt_sz(gvt))
63 struct intel_gvt *gvt = NULL; in failsafe_emulate_mmio_rw() local
70 gvt = vgpu->gvt; in failsafe_emulate_mmio_rw()
73 if (reg_is_mmio(gvt, offset)) { in failsafe_emulate_mmio_rw()
105 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_emulate_mmio_read() local
121 if (reg_is_gtt(gvt, offset)) { in intel_vgpu_emulate_mmio_read()
180 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_emulate_mmio_write() local
197 if (reg_is_gtt(gvt, offset)) { in intel_vgpu_emulate_mmio_write()
[all …]
debugfs.c
87 struct intel_gvt *gvt = vgpu->gvt; in vgpu_mmio_diff_show() local
97 mutex_lock(&gvt->lock); in vgpu_mmio_diff_show()
98 spin_lock_bh(&gvt->scheduler.mmio_context_lock); in vgpu_mmio_diff_show()
100 mmio_hw_access_pre(gvt->gt); in vgpu_mmio_diff_show()
103 mmio_hw_access_post(gvt->gt); in vgpu_mmio_diff_show()
105 spin_unlock_bh(&gvt->scheduler.mmio_context_lock); in vgpu_mmio_diff_show()
106 mutex_unlock(&gvt->lock); in vgpu_mmio_diff_show()
186 void intel_gvt_debugfs_init(struct intel_gvt *gvt) in intel_gvt_debugfs_init() argument
193 &gvt->mmio.num_tracked_mmio); in intel_gvt_debugfs_init()
202 debugfs_remove_recursive(gvt->debugfs_root); in intel_gvt_debugfs_clean()
[all …]
firmware.c
76 static int expose_firmware_sysfs(struct intel_gvt *gvt) in expose_firmware_sysfs() argument
112 memcpy(gvt->firmware.mmio, p, info->mmio_size); in expose_firmware_sysfs()
141 void intel_gvt_free_firmware(struct intel_gvt *gvt) in intel_gvt_free_firmware() argument
143 if (!gvt->firmware.firmware_loaded) in intel_gvt_free_firmware()
144 clean_firmware_sysfs(gvt); in intel_gvt_free_firmware()
146 kfree(gvt->firmware.cfg_space); in intel_gvt_free_firmware()
147 vfree(gvt->firmware.mmio); in intel_gvt_free_firmware()
150 static int verify_firmware(struct intel_gvt *gvt, in verify_firmware() argument
205 int intel_gvt_load_firmware(struct intel_gvt *gvt) in intel_gvt_load_firmware() argument
251 ret = verify_firmware(gvt, fw); in intel_gvt_load_firmware()
[all …]
gtt.c
657 struct intel_gvt *gvt = spt->vgpu->gvt; in ppgtt_spt_get_entry() local
686 struct intel_gvt *gvt = spt->vgpu->gvt; in ppgtt_spt_set_entry() local
1310 struct intel_gvt *gvt = vgpu->gvt; in ppgtt_populate_spt() local
1434 struct intel_gvt *gvt = vgpu->gvt; in sync_oos_page() local
1476 struct intel_gvt *gvt = vgpu->gvt; in detach_oos_page() local
1786 struct intel_gvt *gvt = vgpu->gvt; in invalidate_ppgtt_mm() local
1816 struct intel_gvt *gvt = vgpu->gvt; in shadow_ppgtt_mm() local
1890 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_create_ppgtt_mm() local
2098 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_gma_to_gpa() local
2236 struct intel_gvt *gvt = vgpu->gvt; in emulate_ggtt_mmio_write() local
[all …]
mmio.h
73 intel_gvt_render_mmio_to_engine(struct intel_gvt *gvt, unsigned int reg);
74 unsigned long intel_gvt_get_device_type(struct intel_gvt *gvt);
75 bool intel_gvt_match_device(struct intel_gvt *gvt, unsigned long device);
77 int intel_gvt_setup_mmio_info(struct intel_gvt *gvt);
78 void intel_gvt_clean_mmio_info(struct intel_gvt *gvt);
79 int intel_gvt_for_each_tracked_mmio(struct intel_gvt *gvt,
80 int (*handler)(struct intel_gvt *gvt, u32 offset, void *data),
83 struct intel_gvt_mmio_info *intel_gvt_find_mmio_info(struct intel_gvt *gvt,
102 bool intel_gvt_in_force_nonpriv_whitelist(struct intel_gvt *gvt,
111 void intel_gvt_restore_fence(struct intel_gvt *gvt);
[all …]
interrupt.c
147 struct intel_gvt *gvt, in regbase_to_irq_info() argument
150 struct intel_gvt_irq *irq = &gvt->irq; in regbase_to_irq_info()
178 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reg_imr_handler() local
208 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reg_master_irq_handler() local
247 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reg_ier_handler() local
498 struct intel_gvt *gvt = irq_to_gvt(irq); in gen8_init_irq() local
543 if (HAS_ENGINE(gvt->gt, VCS1)) { in gen8_init_irq()
575 if (IS_BROADWELL(gvt->gt->i915)) { in gen8_init_irq()
626 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_trigger_virtual_event() local
627 struct intel_gvt_irq *irq = &gvt->irq; in intel_vgpu_trigger_virtual_event()
[all …]
mmio_context.c
165 struct intel_gvt *gvt = engine->i915->gvt; in load_render_mocs() local
168 u32 *regs = gvt->engine_mmio_list.mocs_mmio_offset_list; in load_render_mocs()
204 struct intel_gvt *gvt = vgpu->gvt; in restore_context_mmio_for_inhibit() local
220 for (mmio = gvt->engine_mmio_list.mmio; in restore_context_mmio_for_inhibit()
482 for (mmio = engine->i915->gvt->engine_mmio_list.mmio; in switch_mmio()
585 if (GRAPHICS_VER(gvt->gt->i915) >= 9) { in intel_gvt_init_engine_mmio_context()
586 gvt->engine_mmio_list.mmio = gen9_engine_mmio_list; in intel_gvt_init_engine_mmio_context()
592 gvt->engine_mmio_list.mmio = gen8_engine_mmio_list; in intel_gvt_init_engine_mmio_context()
597 for (mmio = gvt->engine_mmio_list.mmio; in intel_gvt_init_engine_mmio_context()
600 gvt->engine_mmio_list.ctx_mmio_count[mmio->id]++; in intel_gvt_init_engine_mmio_context()
[all …]
sched_policy.h
41 int (*init)(struct intel_gvt *gvt);
42 void (*clean)(struct intel_gvt *gvt);
49 void intel_gvt_schedule(struct intel_gvt *gvt);
51 int intel_gvt_init_sched_policy(struct intel_gvt *gvt);
53 void intel_gvt_clean_sched_policy(struct intel_gvt *gvt);
63 void intel_gvt_kick_schedule(struct intel_gvt *gvt);
scheduler.c
128 struct intel_gvt *gvt = vgpu->gvt; in populate_shadow_context() local
522 struct intel_gvt *gvt = workload->vgpu->gvt; in prepare_shadow_batch_buffer() local
851 mutex_lock(&gvt->sched_lock); in pick_next_workload()
899 mutex_unlock(&gvt->sched_lock); in pick_next_workload()
1075 mutex_lock(&gvt->sched_lock); in complete_current_workload()
1141 if (gvt->scheduler.need_reschedule) in complete_current_workload()
1144 mutex_unlock(&gvt->sched_lock); in complete_current_workload()
1152 struct intel_gvt *gvt = engine->i915->gvt; in workload_thread() local
1229 struct intel_gvt *gvt = vgpu->gvt; in intel_gvt_wait_vgpu_idle() local
1248 for_each_engine(engine, gvt->gt, i) { in intel_gvt_clean_workload_scheduler()
[all …]
handlers.c
142 gvt->mmio.num_tracked_mmio++; in new_mmio_info()
259 struct intel_gvt *gvt = vgpu->gvt; in fence_mmio_write() local
268 mmio_hw_access_pre(gvt->gt); in fence_mmio_write()
271 mmio_hw_access_post(gvt->gt); in fence_mmio_write()
1944 struct intel_gvt *gvt = vgpu->gvt; in mmio_read_from_hw() local
1959 mmio_hw_access_pre(gvt->gt); in mmio_read_from_hw()
1962 mmio_hw_access_post(gvt->gt); in mmio_read_from_hw()
3772 ret = handler(gvt, in intel_gvt_for_each_tracked_mmio()
3872 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_mmio_reg_rw() local
3948 mmio_hw_access_pre(gvt->gt); in intel_gvt_restore_fence()
[all …]
cfg_space.c
119 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_emulate_cfg_read()
125 offset + bytes > vgpu->gvt->device_info.cfg_space_size)) in intel_vgpu_emulate_cfg_read()
313 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_emulate_cfg_write()
320 offset + bytes > vgpu->gvt->device_info.cfg_space_size)) in intel_vgpu_emulate_cfg_write()
376 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_init_cfg_space() local
377 struct pci_dev *pdev = to_pci_dev(gvt->gt->i915->drm.dev); in intel_vgpu_init_cfg_space()
378 const struct intel_gvt_device_info *info = &gvt->device_info; in intel_vgpu_init_cfg_space()
382 memcpy(vgpu_cfg_space(vgpu), gvt->firmware.cfg_space, in intel_vgpu_init_cfg_space()
397 gvt_aperture_pa_base(gvt), true); in intel_vgpu_init_cfg_space()
kvmgt.c
153 struct intel_gvt *gvt = kdev_to_i915(mtype_get_parent_dev(mtype))->gvt; in available_instances_show() local
174 struct intel_gvt *gvt = kdev_to_i915(mtype_get_parent_dev(mtype))->gvt; in description_show() local
210 for (i = 0; i < gvt->num_types; i++) { in intel_gvt_init_vgpu_type_groups()
211 type = &gvt->types[i]; in intel_gvt_init_vgpu_type_groups()
238 for (i = 0; i < gvt->num_types; i++) { in intel_gvt_cleanup_vgpu_type_groups()
795 struct intel_gvt *gvt; in intel_vgpu_create() local
799 gvt = kdev_to_i915(pdev)->gvt; in intel_vgpu_create()
1151 struct intel_gvt *gvt = vgpu->gvt; in gtt_entry() local
1162 offset < gvt->device_info.gtt_start_offset + gvt_ggtt_sz(gvt)) ? in gtt_entry()
1897 mutex_lock(&vgpu->gvt->lock); in __kvmgt_vgpu_exist()
[all …]
cmd_parser.c
891 struct intel_gvt *gvt = vgpu->gvt; in cmd_reg_handler() local
921 (IS_BROADWELL(gvt->gt->i915) && in cmd_reg_handler()
1090 struct intel_gvt *gvt = s->vgpu->gvt; in cmd_handler_lrm() local
3101 struct intel_gvt *gvt = vgpu->gvt; in intel_gvt_update_reg_whitelist() local
3105 if (gvt->is_reg_whitelist_updated) in intel_gvt_update_reg_whitelist()
3150 gvt->is_reg_whitelist_updated = true; in intel_gvt_update_reg_whitelist()
3225 add_cmd_entry(gvt, e); in init_cmd_table()
3243 hash_init(gvt->cmd_table); in clean_cmd_table()
3248 clean_cmd_table(gvt); in intel_gvt_clean_cmd_parser()
3255 ret = init_cmd_table(gvt); in intel_gvt_init_cmd_parser()
[all …]
mpt.h
55 void *gvt, const void *ops) in intel_gvt_hypervisor_host_init() argument
60 return intel_gvt_host.mpt->host_init(dev, gvt, ops); in intel_gvt_hypervisor_host_init()
66 static inline void intel_gvt_hypervisor_host_exit(struct device *dev, void *gvt) in intel_gvt_hypervisor_host_exit() argument
72 intel_gvt_host.mpt->host_exit(dev, gvt); in intel_gvt_hypervisor_host_exit()
120 unsigned long offset = vgpu->gvt->device_info.msi_cap_offset; in intel_gvt_hypervisor_inject_msi()
cmd_parser.h
46 void intel_gvt_clean_cmd_parser(struct intel_gvt *gvt);
48 int intel_gvt_init_cmd_parser(struct intel_gvt *gvt);
display.c
60 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in edp_pipe_is_enabled()
72 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in pipe_is_enabled()
172 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in emulate_monitor_status_change()
528 intel_gvt_request_service(vgpu->gvt, in vblank_timer_fn()
537 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in setup_virtual_dp_monitor()
620 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in emulate_vblank_on_pipe()
652 for_each_pipe(vgpu->gvt->gt->i915, pipe) in intel_vgpu_emulate_vblank()
667 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_emulate_hotplug()
756 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_clean_display()
782 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_init_display()
Makefile
2 GVT_DIR := gvt
3 GVT_SOURCE := gvt.o aperture_gm.o handlers.o vgpu.o trace_points.o firmware.o \
hypercall.h
51 int (*host_init)(struct device *dev, void *gvt, const void *ops);
52 void (*host_exit)(struct device *dev, void *gvt);
gtt.h
226 int intel_gvt_init_gtt(struct intel_gvt *gvt);
228 void intel_gvt_clean_gtt(struct intel_gvt *gvt);
295 void intel_gvt_restore_ggtt(struct intel_gvt *gvt);
/linux/drivers/gpu/drm/i915/
intel_gvt.c
133 return dev_priv->gvt; in intel_gvt_active()
163 intel_gvt_pm_resume(dev_priv->gvt); in intel_gvt_resume()
