Searched refs:gvt (Results 1 – 25 of 33) sorted by relevance

/linux-6.1.9/drivers/gpu/drm/i915/gvt/

Makefile
4 gvt/aperture_gm.o \
5 gvt/cfg_space.o \
6 gvt/cmd_parser.o \
7 gvt/debugfs.o \
8 gvt/display.o \
9 gvt/dmabuf.o \
10 gvt/edid.o \
11 gvt/execlist.o \
12 gvt/fb_decoder.o \
13 gvt/firmware.o \
[all …]

aperture_gm.c
44 struct intel_gvt *gvt = vgpu->gvt; in alloc_gm() local
45 struct intel_gt *gt = gvt->gt; in alloc_gm()
54 start = ALIGN(gvt_hidden_gmadr_base(gvt), I915_GTT_PAGE_SIZE); in alloc_gm()
55 end = ALIGN(gvt_hidden_gmadr_end(gvt), I915_GTT_PAGE_SIZE); in alloc_gm()
60 start = ALIGN(gvt_aperture_gmadr_base(gvt), I915_GTT_PAGE_SIZE); in alloc_gm()
61 end = ALIGN(gvt_aperture_gmadr_end(gvt), I915_GTT_PAGE_SIZE); in alloc_gm()
82 struct intel_gvt *gvt = vgpu->gvt; in alloc_vgpu_gm() local
83 struct intel_gt *gt = gvt->gt; in alloc_vgpu_gm()
110 struct intel_gvt *gvt = vgpu->gvt; in free_vgpu_gm() local
111 struct intel_gt *gt = gvt->gt; in free_vgpu_gm()
[all …]
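
The alloc_gm() snippet above carves a vGPU's graphics memory out of either
the mappable aperture or the CPU-invisible "hidden" range, page-aligning
both ends of the chosen range before allocating from it. A minimal
userspace sketch of that pattern, with all base/end values invented for
illustration:

    /* Range carving as in alloc_gm(): pick a range, align both ends. */
    #include <stdio.h>

    #define GTT_PAGE_SIZE 4096ULL
    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        unsigned long long start, end;
        int high_gm = 1; /* 1: hidden (high) range, 0: aperture range */

        if (high_gm) {
            start = ALIGN(0x10000000ULL, GTT_PAGE_SIZE); /* invented base */
            end   = ALIGN(0x20000000ULL, GTT_PAGE_SIZE); /* invented end */
        } else {
            start = ALIGN(0x00000000ULL, GTT_PAGE_SIZE);
            end   = ALIGN(0x10000000ULL, GTT_PAGE_SIZE);
        }
        printf("allocate vGPU graphics memory in [%#llx, %#llx)\n", start, end);
        return 0;
    }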

sched_policy.c
42 for_each_engine(engine, vgpu->gvt->gt, i) { in vgpu_has_pending_workload()
68 struct intel_gvt *gvt; member
80 if (!vgpu || vgpu == vgpu->gvt->idle_vgpu) in vgpu_update_timeslice()
132 static void try_to_schedule_next_vgpu(struct intel_gvt *gvt) in try_to_schedule_next_vgpu() argument
134 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in try_to_schedule_next_vgpu()
155 for_each_engine(engine, gvt->gt, i) { in try_to_schedule_next_vgpu()
172 for_each_engine(engine, gvt->gt, i) in try_to_schedule_next_vgpu()
213 struct intel_gvt *gvt = sched_data->gvt; in tbs_sched_func() local
214 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in tbs_sched_func()
233 scheduler->next_vgpu = gvt->idle_vgpu; in tbs_sched_func()
[all …]

gvt.h
177 struct intel_gvt *gvt; member
369 return i915->gvt; in to_gvt()
385 static inline void intel_gvt_request_service(struct intel_gvt *gvt, in intel_gvt_request_service() argument
388 set_bit(service, (void *)&gvt->service_request); in intel_gvt_request_service()
389 wake_up(&gvt->service_thread_wq); in intel_gvt_request_service()
392 void intel_gvt_free_firmware(struct intel_gvt *gvt);
393 int intel_gvt_load_firmware(struct intel_gvt *gvt);
403 #define gvt_to_ggtt(gvt) ((gvt)->gt->ggtt) argument
406 #define gvt_aperture_sz(gvt) gvt_to_ggtt(gvt)->mappable_end argument
407 #define gvt_aperture_pa_base(gvt) gvt_to_ggtt(gvt)->gmadr.start argument
[all …]
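
intel_gvt_request_service() above is a request-bit-plus-waitqueue handoff:
the caller sets a bit in gvt->service_request and wakes the service thread.
A hedged userspace sketch of the same pattern, with a pthread mutex and
condition variable standing in for set_bit()/wake_up(); the bit numbering
is illustrative:

    #include <pthread.h>
    #include <stdio.h>

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t wq = PTHREAD_COND_INITIALIZER;
    static unsigned long service_request;

    static void request_service(int service)
    {
        pthread_mutex_lock(&lock);
        service_request |= 1UL << service; /* set_bit(service, ...) */
        pthread_cond_signal(&wq);          /* wake_up(&service_thread_wq) */
        pthread_mutex_unlock(&lock);
    }

    static void *service_thread(void *arg)
    {
        (void)arg;
        pthread_mutex_lock(&lock);
        while (!service_request)
            pthread_cond_wait(&wq, &lock); /* sleep until a bit is set */
        printf("servicing request mask %#lx\n", service_request);
        pthread_mutex_unlock(&lock);
        return NULL;
    }

    int main(void)
    {
        pthread_t t;
        pthread_create(&t, NULL, service_thread, NULL);
        request_service(0);
        pthread_join(t, NULL);
        return 0;
    }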

debugfs.c
58 static inline int mmio_diff_handler(struct intel_gvt *gvt, in mmio_diff_handler() argument
65 preg = intel_uncore_read_notrace(gvt->gt->uncore, _MMIO(offset)); in mmio_diff_handler()
87 struct intel_gvt *gvt = vgpu->gvt; in vgpu_mmio_diff_show() local
97 mutex_lock(&gvt->lock); in vgpu_mmio_diff_show()
98 spin_lock_bh(&gvt->scheduler.mmio_context_lock); in vgpu_mmio_diff_show()
100 mmio_hw_access_pre(gvt->gt); in vgpu_mmio_diff_show()
102 intel_gvt_for_each_tracked_mmio(gvt, mmio_diff_handler, &param); in vgpu_mmio_diff_show()
103 mmio_hw_access_post(gvt->gt); in vgpu_mmio_diff_show()
105 spin_unlock_bh(&gvt->scheduler.mmio_context_lock); in vgpu_mmio_diff_show()
106 mutex_unlock(&gvt->lock); in vgpu_mmio_diff_show()
[all …]

mmio.c
56 #define reg_is_mmio(gvt, reg) \ argument
57 (reg >= 0 && reg < gvt->device_info.mmio_size)
59 #define reg_is_gtt(gvt, reg) \ argument
60 (reg >= gvt->device_info.gtt_start_offset \
61 && reg < gvt->device_info.gtt_start_offset + gvt_ggtt_sz(gvt))
66 struct intel_gvt *gvt = NULL; in failsafe_emulate_mmio_rw() local
73 gvt = vgpu->gvt; in failsafe_emulate_mmio_rw()
76 if (reg_is_mmio(gvt, offset)) { in failsafe_emulate_mmio_rw()
83 } else if (reg_is_gtt(gvt, offset)) { in failsafe_emulate_mmio_rw()
84 offset -= gvt->device_info.gtt_start_offset; in failsafe_emulate_mmio_rw()
[all …]
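
failsafe_emulate_mmio_rw() above dispatches on offset ranges: reg_is_mmio()
and reg_is_gtt() classify the access, and GTT accesses are rebased by
subtracting gtt_start_offset. A compilable sketch of that dispatch; the
sizes below are invented, not the driver's device_info values:

    #include <stdio.h>

    struct device_info {
        unsigned int mmio_size;
        unsigned int gtt_start_offset;
        unsigned int gtt_size;
    };

    /* Invented layout: 2 MiB of MMIO, GTT at 8 MiB. */
    static const struct device_info info = { 0x200000, 0x800000, 0x800000 };

    #define reg_is_mmio(i, reg) ((reg) < (i)->mmio_size)
    #define reg_is_gtt(i, reg) \
        ((reg) >= (i)->gtt_start_offset && \
         (reg) < (i)->gtt_start_offset + (i)->gtt_size)

    int main(void)
    {
        unsigned int offset = 0x800040;

        if (reg_is_mmio(&info, offset))
            printf("MMIO access at %#x\n", offset);
        else if (reg_is_gtt(&info, offset))
            printf("GTT access at entry offset %#x\n",
                   offset - info.gtt_start_offset);
        return 0;
    }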

vgpu.c
40 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in populate_pvinfo_page()
104 int intel_gvt_init_vgpu_types(struct intel_gvt *gvt) in intel_gvt_init_vgpu_types() argument
106 unsigned int low_avail = gvt_aperture_sz(gvt) - HOST_LOW_GM_SIZE; in intel_gvt_init_vgpu_types()
107 unsigned int high_avail = gvt_hidden_sz(gvt) - HOST_HIGH_GM_SIZE; in intel_gvt_init_vgpu_types()
111 gvt->types = kcalloc(num_types, sizeof(struct intel_vgpu_type), in intel_gvt_init_vgpu_types()
113 if (!gvt->types) in intel_gvt_init_vgpu_types()
116 gvt->mdev_types = kcalloc(num_types, sizeof(*gvt->mdev_types), in intel_gvt_init_vgpu_types()
118 if (!gvt->mdev_types) in intel_gvt_init_vgpu_types()
129 sprintf(gvt->types[i].name, "GVTg_V%u_%s", in intel_gvt_init_vgpu_types()
130 GRAPHICS_VER(gvt->gt->i915) == 8 ? 4 : 5, conf->name); in intel_gvt_init_vgpu_types()
[all …]

firmware.c
69 static int expose_firmware_sysfs(struct intel_gvt *gvt) in expose_firmware_sysfs() argument
71 struct intel_gvt_device_info *info = &gvt->device_info; in expose_firmware_sysfs()
72 struct drm_i915_private *i915 = gvt->gt->i915; in expose_firmware_sysfs()
96 memcpy(gvt->firmware.cfg_space, i915->vgpu.initial_cfg_space, in expose_firmware_sysfs()
98 memcpy(p, gvt->firmware.cfg_space, info->cfg_space_size); in expose_firmware_sysfs()
102 memcpy(gvt->firmware.mmio, i915->vgpu.initial_mmio, in expose_firmware_sysfs()
105 memcpy(p, gvt->firmware.mmio, info->mmio_size); in expose_firmware_sysfs()
121 static void clean_firmware_sysfs(struct intel_gvt *gvt) in clean_firmware_sysfs() argument
123 struct pci_dev *pdev = to_pci_dev(gvt->gt->i915->drm.dev); in clean_firmware_sysfs()
134 void intel_gvt_free_firmware(struct intel_gvt *gvt) in intel_gvt_free_firmware() argument
[all …]

kvmgt.c
182 struct device *dev = vgpu->gvt->gt->i915->drm.dev; in gvt_dma_map_page()
205 struct device *dev = vgpu->gvt->gt->i915->drm.dev; in gvt_dma_unmap_page()
639 mutex_lock(&vgpu->gvt->lock); in __kvmgt_vgpu_exist()
640 for_each_active_vgpu(vgpu->gvt, itr, id) { in __kvmgt_vgpu_exist()
650 mutex_unlock(&vgpu->gvt->lock); in __kvmgt_vgpu_exist()
786 aperture_va = io_mapping_map_wc(&vgpu->gvt->gt->ggtt->iomap, in intel_vgpu_aperture_rw()
853 struct intel_gvt *gvt = vgpu->gvt; in gtt_entry() local
863 return (offset >= gvt->device_info.gtt_start_offset && in gtt_entry()
864 offset < gvt->device_info.gtt_start_offset + gvt_ggtt_sz(gvt)) ? in gtt_entry()
1049 pgoff = (gvt_aperture_pa_base(vgpu->gvt) >> PAGE_SHIFT) + pgoff; in intel_vgpu_mmap()
[all …]

gtt.c
92 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_ggtt_gmadr_g2h()
110 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_ggtt_gmadr_h2g()
112 if (drm_WARN(&i915->drm, !gvt_gmadr_is_valid(vgpu->gvt, h_addr), in intel_gvt_ggtt_gmadr_h2g()
116 if (gvt_gmadr_is_aperture(vgpu->gvt, h_addr)) in intel_gvt_ggtt_gmadr_h2g()
118 + (h_addr - gvt_aperture_gmadr_base(vgpu->gvt)); in intel_gvt_ggtt_gmadr_h2g()
121 + (h_addr - gvt_hidden_gmadr_base(vgpu->gvt)); in intel_gvt_ggtt_gmadr_h2g()
326 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in gtt_get_entry64()
339 e->val64 = read_pte64(vgpu->gvt->gt->ggtt, index); in gtt_get_entry64()
351 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in gtt_set_entry64()
364 write_pte64(vgpu->gvt->gt->ggtt, index, e->val64); in gtt_set_entry64()
[all …]
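
intel_gvt_ggtt_gmadr_h2g() above rebases a host graphics address into the
vGPU's view: subtract the host base of whichever range (aperture or hidden)
the address falls in, then add the vGPU's own base for that range. A
self-contained sketch with invented base/size values:

    #include <stdio.h>

    struct range { unsigned long long base, size; };

    /* Invented layout; the real bases come from the gvt/vgpu structs. */
    static const struct range host_aperture = { 0x00000000ULL, 0x10000000ULL };
    static const struct range host_hidden   = { 0x10000000ULL, 0x30000000ULL };
    static const struct range vgpu_aperture = { 0x02000000ULL, 0x04000000ULL };
    static const struct range vgpu_hidden   = { 0x18000000ULL, 0x08000000ULL };

    static int in_range(const struct range *r, unsigned long long a)
    {
        return a >= r->base && a < r->base + r->size;
    }

    static unsigned long long h2g(unsigned long long h_addr)
    {
        if (in_range(&host_aperture, h_addr))
            return vgpu_aperture.base + (h_addr - host_aperture.base);
        return vgpu_hidden.base + (h_addr - host_hidden.base);
    }

    int main(void)
    {
        printf("h2g(0x1000) = %#llx\n", h2g(0x1000ULL));
        return 0;
    }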

mmio.h
73 intel_gvt_render_mmio_to_engine(struct intel_gvt *gvt, unsigned int reg);
74 unsigned long intel_gvt_get_device_type(struct intel_gvt *gvt);
76 int intel_gvt_setup_mmio_info(struct intel_gvt *gvt);
77 void intel_gvt_clean_mmio_info(struct intel_gvt *gvt);
78 int intel_gvt_for_each_tracked_mmio(struct intel_gvt *gvt,
79 int (*handler)(struct intel_gvt *gvt, u32 offset, void *data),
82 struct intel_gvt_mmio_info *intel_gvt_find_mmio_info(struct intel_gvt *gvt,
101 bool intel_gvt_in_force_nonpriv_whitelist(struct intel_gvt *gvt,
110 void intel_gvt_restore_fence(struct intel_gvt *gvt);
111 void intel_gvt_restore_mmio(struct intel_gvt *gvt);

interrupt.c
150 struct intel_gvt *gvt, in regbase_to_irq_info() argument
153 struct intel_gvt_irq *irq = &gvt->irq; in regbase_to_irq_info()
181 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reg_imr_handler() local
182 const struct intel_gvt_irq_ops *ops = gvt->irq.ops; in intel_vgpu_reg_imr_handler()
211 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reg_master_irq_handler() local
212 const struct intel_gvt_irq_ops *ops = gvt->irq.ops; in intel_vgpu_reg_master_irq_handler()
250 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reg_ier_handler() local
251 struct drm_i915_private *i915 = gvt->gt->i915; in intel_vgpu_reg_ier_handler()
252 const struct intel_gvt_irq_ops *ops = gvt->irq.ops; in intel_vgpu_reg_ier_handler()
261 info = regbase_to_irq_info(gvt, ier_to_regbase(reg)); in intel_vgpu_reg_ier_handler()
[all …]

sched_policy.h
41 int (*init)(struct intel_gvt *gvt);
42 void (*clean)(struct intel_gvt *gvt);
49 void intel_gvt_schedule(struct intel_gvt *gvt);
51 int intel_gvt_init_sched_policy(struct intel_gvt *gvt);
53 void intel_gvt_clean_sched_policy(struct intel_gvt *gvt);
63 void intel_gvt_kick_schedule(struct intel_gvt *gvt);
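
The header above models a scheduling policy as a table of function pointers
(init/clean, plus the scheduling hooks elided here), so the GVT core can
swap policies without knowing their internals. A minimal sketch of that
ops-table pattern; everything except the init/clean signatures is
illustrative:

    #include <stdio.h>

    struct intel_gvt; /* opaque here */

    struct sched_policy_ops {
        int (*init)(struct intel_gvt *gvt);
        void (*clean)(struct intel_gvt *gvt);
    };

    static int tbs_init(struct intel_gvt *gvt)
    {
        (void)gvt;
        printf("time-based scheduler: init\n");
        return 0;
    }

    static void tbs_clean(struct intel_gvt *gvt)
    {
        (void)gvt;
        printf("time-based scheduler: clean\n");
    }

    static const struct sched_policy_ops tbs_ops = {
        .init = tbs_init,
        .clean = tbs_clean,
    };

    int main(void)
    {
        const struct sched_policy_ops *ops = &tbs_ops; /* pick a policy */
        if (!ops->init(NULL))
            ops->clean(NULL);
        return 0;
    }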

handlers.c
57 unsigned long intel_gvt_get_device_type(struct intel_gvt *gvt) in intel_gvt_get_device_type() argument
59 struct drm_i915_private *i915 = gvt->gt->i915; in intel_gvt_get_device_type()
75 static bool intel_gvt_match_device(struct intel_gvt *gvt, in intel_gvt_match_device() argument
78 return intel_gvt_get_device_type(gvt) & device; in intel_gvt_match_device()
93 struct intel_gvt_mmio_info *intel_gvt_find_mmio_info(struct intel_gvt *gvt, in intel_gvt_find_mmio_info() argument
98 hash_for_each_possible(gvt->mmio.mmio_info_table, e, node, offset) { in intel_gvt_find_mmio_info()
105 static int setup_mmio_info(struct intel_gvt *gvt, u32 offset, u32 size, in setup_mmio_info() argument
112 if (!intel_gvt_match_device(gvt, device)) in setup_mmio_info()
122 p = intel_gvt_find_mmio_info(gvt, i); in setup_mmio_info()
129 gvt->mmio.mmio_attribute[i / 4] = flags; in setup_mmio_info()
[all …]
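
intel_gvt_find_mmio_info() above walks one hash bucket keyed by register
offset (hash_for_each_possible() in the kernel). A userspace sketch of the
same chained-hash lookup, with an invented table size and a single tracked
register:

    #include <stdio.h>

    #define TABLE_BITS 4
    #define TABLE_SIZE (1u << TABLE_BITS)

    struct mmio_info {
        unsigned int offset;
        struct mmio_info *next;
    };

    static struct mmio_info *table[TABLE_SIZE];

    static unsigned int hash(unsigned int offset)
    {
        return (offset >> 2) & (TABLE_SIZE - 1); /* registers are 4-byte aligned */
    }

    static void track(struct mmio_info *e)
    {
        unsigned int h = hash(e->offset);
        e->next = table[h];
        table[h] = e;
    }

    static struct mmio_info *find(unsigned int offset)
    {
        struct mmio_info *e;

        for (e = table[hash(offset)]; e; e = e->next)
            if (e->offset == offset)
                return e;
        return NULL;
    }

    int main(void)
    {
        struct mmio_info reg = { .offset = 0x2030, .next = NULL };

        track(&reg);
        printf("0x2030 tracked: %s\n", find(0x2030) ? "yes" : "no");
        return 0;
    }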

mmio_context.c
167 struct intel_gvt *gvt = engine->i915->gvt; in load_render_mocs() local
169 u32 cnt = gvt->engine_mmio_list.mocs_mmio_offset_list_cnt; in load_render_mocs()
170 u32 *regs = gvt->engine_mmio_list.mocs_mmio_offset_list; in load_render_mocs()
206 struct intel_gvt *gvt = vgpu->gvt; in restore_context_mmio_for_inhibit() local
208 int count = gvt->engine_mmio_list.ctx_mmio_count[ring_id]; in restore_context_mmio_for_inhibit()
222 for (mmio = gvt->engine_mmio_list.mmio; in restore_context_mmio_for_inhibit()
357 u32 *regs = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list; in handle_tlb_pending_event()
358 u32 cnt = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list_cnt; in handle_tlb_pending_event()
484 for (mmio = engine->i915->gvt->engine_mmio_list.mmio; in switch_mmio()
583 void intel_gvt_init_engine_mmio_context(struct intel_gvt *gvt) in intel_gvt_init_engine_mmio_context() argument
[all …]

scheduler.c
87 struct drm_i915_private *dev_priv = workload->vgpu->gvt->gt->i915; in sr_oa_regs()
130 struct intel_gvt *gvt = vgpu->gvt; in populate_shadow_context() local
218 if (IS_BROADWELL(gvt->gt->i915) && workload->engine->id == RCS0) in populate_shadow_context()
290 struct intel_gvt *gvt = container_of(nb, struct intel_gvt, in shadow_context_status_change() local
292 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in shadow_context_status_change()
524 struct intel_gvt *gvt = workload->vgpu->gvt; in prepare_shadow_batch_buffer() local
525 const int gmadr_bytes = gvt->device_info.gmadr_bytes_in_cmd; in prepare_shadow_batch_buffer()
849 pick_next_workload(struct intel_gvt *gvt, struct intel_engine_cs *engine) in pick_next_workload() argument
851 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in pick_next_workload()
854 mutex_lock(&gvt->sched_lock); in pick_next_workload()
[all …]

cfg_space.c
120 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_emulate_cfg_read()
126 offset + bytes > vgpu->gvt->device_info.cfg_space_size)) in intel_vgpu_emulate_cfg_read()
259 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_emulate_cfg_write()
266 offset + bytes > vgpu->gvt->device_info.cfg_space_size)) in intel_vgpu_emulate_cfg_write()
322 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_init_cfg_space() local
323 struct pci_dev *pdev = to_pci_dev(gvt->gt->i915->drm.dev); in intel_vgpu_init_cfg_space()
324 const struct intel_gvt_device_info *info = &gvt->device_info; in intel_vgpu_init_cfg_space()
328 memcpy(vgpu_cfg_space(vgpu), gvt->firmware.cfg_space, in intel_vgpu_init_cfg_space()
343 gvt_aperture_pa_base(gvt), true); in intel_vgpu_init_cfg_space()

cmd_parser.c
517 (s->vgpu->gvt->device_info.gmadr_bytes_in_cmd >> 2)
663 find_cmd_entry(struct intel_gvt *gvt, unsigned int opcode, in find_cmd_entry() argument
668 hash_for_each_possible(gvt->cmd_table, e, hlist, opcode) { in find_cmd_entry()
677 get_cmd_info(struct intel_gvt *gvt, u32 cmd, in get_cmd_info() argument
686 return find_cmd_entry(gvt, opcode, engine); in get_cmd_info()
893 struct intel_gvt *gvt = vgpu->gvt; in cmd_reg_handler() local
897 if (offset + 4 > gvt->device_info.mmio_size) { in cmd_reg_handler()
906 intel_gvt_mmio_set_cmd_accessible(gvt, offset); in cmd_reg_handler()
907 mmio_info = intel_gvt_find_mmio_info(gvt, offset); in cmd_reg_handler()
909 intel_gvt_mmio_set_cmd_write_patch(gvt, offset); in cmd_reg_handler()
[all …]

cmd_parser.h
46 void intel_gvt_clean_cmd_parser(struct intel_gvt *gvt);
48 int intel_gvt_init_cmd_parser(struct intel_gvt *gvt);

display.c
61 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in edp_pipe_is_enabled()
73 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in pipe_is_enabled()
173 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in emulate_monitor_status_change()
529 intel_gvt_request_service(vgpu->gvt, in vblank_timer_fn()
538 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in setup_virtual_dp_monitor()
621 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in emulate_vblank_on_pipe()
653 for_each_pipe(vgpu->gvt->gt->i915, pipe) in intel_vgpu_emulate_vblank()
668 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_emulate_hotplug()
757 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_clean_display()
783 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_init_display()

gtt.h
226 int intel_gvt_init_gtt(struct intel_gvt *gvt);
228 void intel_gvt_clean_gtt(struct intel_gvt *gvt);
295 void intel_gvt_restore_ggtt(struct intel_gvt *gvt);

scheduler.h
141 int intel_gvt_init_workload_scheduler(struct intel_gvt *gvt);
143 void intel_gvt_clean_workload_scheduler(struct intel_gvt *gvt);

edid.c
140 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in gmbus0_mmio_write()
281 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in gmbus3_mmio_write()
378 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_i2c_handle_gmbus_read()
408 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_i2c_handle_gmbus_write()
484 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_i2c_handle_aux_ch_write()

mmio_context.h
63 void intel_gvt_init_engine_mmio_context(struct intel_gvt *gvt);

fb_decoder.c
150 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_get_stride()
206 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_decode_primary_plane()
336 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_decode_cursor_plane()
