/linux-6.6.21/drivers/gpu/drm/i915/gvt/

Makefile
     4  gvt/aperture_gm.o \
     5  gvt/cfg_space.o \
     6  gvt/cmd_parser.o \
     7  gvt/debugfs.o \
     8  gvt/display.o \
     9  gvt/dmabuf.o \
    10  gvt/edid.o \
    11  gvt/execlist.o \
    12  gvt/fb_decoder.o \
    13  gvt/firmware.o \
    [all …]

aperture_gm.c
    44  struct intel_gvt *gvt = vgpu->gvt;    in alloc_gm() local
    45  struct intel_gt *gt = gvt->gt;    in alloc_gm()
    54  start = ALIGN(gvt_hidden_gmadr_base(gvt), I915_GTT_PAGE_SIZE);    in alloc_gm()
    55  end = ALIGN(gvt_hidden_gmadr_end(gvt), I915_GTT_PAGE_SIZE);    in alloc_gm()
    60  start = ALIGN(gvt_aperture_gmadr_base(gvt), I915_GTT_PAGE_SIZE);    in alloc_gm()
    61  end = ALIGN(gvt_aperture_gmadr_end(gvt), I915_GTT_PAGE_SIZE);    in alloc_gm()
    82  struct intel_gvt *gvt = vgpu->gvt;    in alloc_vgpu_gm() local
    83  struct intel_gt *gt = gvt->gt;    in alloc_vgpu_gm()
   110  struct intel_gvt *gvt = vgpu->gvt;    in free_vgpu_gm() local
   111  struct intel_gt *gt = gvt->gt;    in free_vgpu_gm()
   [all …]

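The alloc_gm() hits above show how a vGPU's graphics memory is carved out: the allocator picks either the hidden (high) range or the mappable aperture, then aligns both ends to I915_GTT_PAGE_SIZE. Below is a minimal user-space sketch of that range selection; the window layout constants and the pick_range() helper are invented for illustration and only mirror the shape of alloc_gm():

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

#define I915_GTT_PAGE_SIZE 4096u
#define ALIGN(x, a) (((x) + (a) - 1) & ~((uint64_t)(a) - 1))

/* Invented example layout: 256 MiB aperture, hidden range above it. */
static const uint64_t aperture_base = 0,             aperture_end = 256ull << 20;
static const uint64_t hidden_base   = 256ull << 20,  hidden_end   = 2048ull << 20;

/* Mirrors the shape of alloc_gm(): pick the window, align both ends. */
static void pick_range(bool high_gm, uint64_t *start, uint64_t *end)
{
	if (high_gm) {
		*start = ALIGN(hidden_base, I915_GTT_PAGE_SIZE);
		*end = ALIGN(hidden_end, I915_GTT_PAGE_SIZE);
	} else {
		*start = ALIGN(aperture_base, I915_GTT_PAGE_SIZE);
		*end = ALIGN(aperture_end, I915_GTT_PAGE_SIZE);
	}
}

int main(void)
{
	uint64_t s, e;

	pick_range(true, &s, &e);
	printf("hidden GM window: [%#llx, %#llx)\n",
	       (unsigned long long)s, (unsigned long long)e);
	return 0;
}
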
sched_policy.c
    42  for_each_engine(engine, vgpu->gvt->gt, i) {    in vgpu_has_pending_workload()
    68  struct intel_gvt *gvt;    member
    80  if (!vgpu || vgpu == vgpu->gvt->idle_vgpu)    in vgpu_update_timeslice()
   132  static void try_to_schedule_next_vgpu(struct intel_gvt *gvt)    in try_to_schedule_next_vgpu() argument
   134  struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler;    in try_to_schedule_next_vgpu()
   155  for_each_engine(engine, gvt->gt, i) {    in try_to_schedule_next_vgpu()
   172  for_each_engine(engine, gvt->gt, i)    in try_to_schedule_next_vgpu()
   213  struct intel_gvt *gvt = sched_data->gvt;    in tbs_sched_func() local
   214  struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler;    in tbs_sched_func()
   233  scheduler->next_vgpu = gvt->idle_vgpu;    in tbs_sched_func()
   [all …]

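tbs_sched_func() and vgpu_update_timeslice() implement time-based scheduling: the idle vGPU is never charged for time, and when no guest vGPU is runnable the scheduler falls back to gvt->idle_vgpu. A toy model of that accounting; toy_vgpu and its fields are invented, not the kernel's vgpu_sched_data:

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for per-vGPU scheduling data. */
struct toy_vgpu {
	int id;
	int is_idle;         /* models vgpu == gvt->idle_vgpu */
	int64_t ts_left_ns;  /* remaining timeslice */
};

static void update_timeslice(struct toy_vgpu *v, int64_t ran_ns)
{
	if (!v || v->is_idle)  /* the idle vGPU is never charged */
		return;
	v->ts_left_ns -= ran_ns;
}

/* Pick the first vGPU with budget left; fall back to idle otherwise. */
static struct toy_vgpu *next_vgpu(struct toy_vgpu *vgpus, int n,
				  struct toy_vgpu *idle)
{
	for (int i = 0; i < n; i++)
		if (!vgpus[i].is_idle && vgpus[i].ts_left_ns > 0)
			return &vgpus[i];
	return idle;
}

int main(void)
{
	struct toy_vgpu idle = { .id = 0, .is_idle = 1 };
	struct toy_vgpu v[2] = { { 1, 0, 1000000 }, { 2, 0, 2000000 } };

	update_timeslice(&v[0], 1500000);  /* vGPU 1 overran its slice */
	printf("next: vGPU %d\n", next_vgpu(v, 2, &idle)->id);
	return 0;
}
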
gvt.h
   184  struct intel_gvt *gvt;    member
   373  return i915->gvt;    in to_gvt()
   389  static inline void intel_gvt_request_service(struct intel_gvt *gvt,    in intel_gvt_request_service() argument
   392  set_bit(service, (void *)&gvt->service_request);    in intel_gvt_request_service()
   393  wake_up(&gvt->service_thread_wq);    in intel_gvt_request_service()
   396  void intel_gvt_free_firmware(struct intel_gvt *gvt);
   397  int intel_gvt_load_firmware(struct intel_gvt *gvt);
   407  #define gvt_to_ggtt(gvt) ((gvt)->gt->ggtt)    argument
   410  #define gvt_aperture_sz(gvt) gvt_to_ggtt(gvt)->mappable_end    argument
   411  #define gvt_aperture_pa_base(gvt) gvt_to_ggtt(gvt)->gmadr.start    argument
   [all …]

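The macros at the bottom compose: gvt_to_ggtt() resolves the GGTT hanging off gvt->gt, and the aperture macros read its mappable_end and gmadr.start. A self-contained sketch of that composition; toy_ggtt, toy_gt, and toy_gvt are invented stand-ins carrying only the fields the macros touch, and the example values are made up:

#include <stdint.h>
#include <stdio.h>

/* Stand-in structs mirroring just the fields the macros dereference. */
struct toy_ggtt { uint64_t mappable_end; struct { uint64_t start; } gmadr; };
struct toy_gt   { struct toy_ggtt *ggtt; };
struct toy_gvt  { struct toy_gt *gt; };

/* Same shape as the gvt.h macros: each builds on gvt_to_ggtt(). */
#define gvt_to_ggtt(gvt)          ((gvt)->gt->ggtt)
#define gvt_aperture_sz(gvt)      (gvt_to_ggtt(gvt)->mappable_end)
#define gvt_aperture_pa_base(gvt) (gvt_to_ggtt(gvt)->gmadr.start)

int main(void)
{
	struct toy_ggtt ggtt = { .mappable_end = 256ull << 20,
				 .gmadr = { .start = 0xc0000000ull } };
	struct toy_gt gt = { .ggtt = &ggtt };
	struct toy_gvt gvt = { .gt = &gt };

	printf("aperture size %llu MiB, PA base %#llx\n",
	       (unsigned long long)(gvt_aperture_sz(&gvt) >> 20),
	       (unsigned long long)gvt_aperture_pa_base(&gvt));
	return 0;
}
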
mmio.c
    57  #define reg_is_mmio(gvt, reg) \    argument
    58  (reg >= 0 && reg < gvt->device_info.mmio_size)
    60  #define reg_is_gtt(gvt, reg) \    argument
    61  (reg >= gvt->device_info.gtt_start_offset \
    62  && reg < gvt->device_info.gtt_start_offset + gvt_ggtt_sz(gvt))
    67  struct intel_gvt *gvt = NULL;    in failsafe_emulate_mmio_rw() local
    74  gvt = vgpu->gvt;    in failsafe_emulate_mmio_rw()
    77  if (reg_is_mmio(gvt, offset)) {    in failsafe_emulate_mmio_rw()
    84  } else if (reg_is_gtt(gvt, offset)) {    in failsafe_emulate_mmio_rw()
    85  offset -= gvt->device_info.gtt_start_offset;    in failsafe_emulate_mmio_rw()
   [all …]

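failsafe_emulate_mmio_rw() classifies a BAR0 offset with reg_is_mmio()/reg_is_gtt() and, for GTT hits, rebases the offset past gtt_start_offset. The same classification as a standalone sketch; the window sizes are invented example values standing in for gvt->device_info:

#include <stdint.h>
#include <stdio.h>

/* Invented example geometry: 2 MiB of MMIO registers, then the GTT. */
#define MMIO_SIZE        (2u << 20)
#define GTT_START_OFFSET (2u << 20)
#define GGTT_SIZE        (8u << 20)

#define reg_is_mmio(reg) ((reg) < MMIO_SIZE)
#define reg_is_gtt(reg)  ((reg) >= GTT_START_OFFSET && \
			  (reg) < GTT_START_OFFSET + GGTT_SIZE)

static void classify(uint32_t offset)
{
	if (reg_is_mmio(offset)) {
		printf("%#x: MMIO register\n", offset);
	} else if (reg_is_gtt(offset)) {
		/* Same rebasing step as failsafe_emulate_mmio_rw(). */
		printf("%#x: GTT entry at offset %#x\n",
		       offset, offset - GTT_START_OFFSET);
	} else {
		printf("%#x: out of range\n", offset);
	}
}

int main(void)
{
	classify(0x2000);    /* lands in MMIO */
	classify(0x200008);  /* lands in the GTT */
	return 0;
}
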
vgpu.c
    40  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in populate_pvinfo_page()
   104  int intel_gvt_init_vgpu_types(struct intel_gvt *gvt)    in intel_gvt_init_vgpu_types() argument
   106  unsigned int low_avail = gvt_aperture_sz(gvt) - HOST_LOW_GM_SIZE;    in intel_gvt_init_vgpu_types()
   107  unsigned int high_avail = gvt_hidden_sz(gvt) - HOST_HIGH_GM_SIZE;    in intel_gvt_init_vgpu_types()
   111  gvt->types = kcalloc(num_types, sizeof(struct intel_vgpu_type),    in intel_gvt_init_vgpu_types()
   113  if (!gvt->types)    in intel_gvt_init_vgpu_types()
   116  gvt->mdev_types = kcalloc(num_types, sizeof(*gvt->mdev_types),    in intel_gvt_init_vgpu_types()
   118  if (!gvt->mdev_types)    in intel_gvt_init_vgpu_types()
   129  sprintf(gvt->types[i].name, "GVTg_V%u_%s",    in intel_gvt_init_vgpu_types()
   130  GRAPHICS_VER(gvt->gt->i915) == 8 ? 4 : 5, conf->name);    in intel_gvt_init_vgpu_types()
   [all …]

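intel_gvt_init_vgpu_types() sizes the vGPU type pool from what remains after the host's own reservations and builds names like GVTg_V4_* (graphics version 8) or GVTg_V5_* (later versions). A sketch of the sizing and naming rule; the totals, reservation constants, and the "8" suffix are invented example values:

#include <stdint.h>
#include <stdio.h>

/* Invented host reservations; the kernel uses HOST_LOW_GM_SIZE etc. */
#define HOST_LOW_GM_SIZE  (32ull << 20)
#define HOST_HIGH_GM_SIZE (48ull << 20)

int main(void)
{
	uint64_t aperture_sz = 256ull << 20;  /* example totals */
	uint64_t hidden_sz   = 1024ull << 20;
	unsigned int graphics_ver = 8;        /* gen8 => "V4" prefix */
	char name[32];

	uint64_t low_avail  = aperture_sz - HOST_LOW_GM_SIZE;
	uint64_t high_avail = hidden_sz - HOST_HIGH_GM_SIZE;

	/* Same naming rule as intel_gvt_init_vgpu_types(). */
	snprintf(name, sizeof(name), "GVTg_V%u_%s",
		 graphics_ver == 8 ? 4u : 5u, "8");

	printf("%s: %llu MiB low / %llu MiB high available\n", name,
	       (unsigned long long)(low_avail >> 20),
	       (unsigned long long)(high_avail >> 20));
	return 0;
}
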
debugfs.c
    58  static inline int mmio_diff_handler(struct intel_gvt *gvt,    in mmio_diff_handler() argument
    65  preg = intel_uncore_read_notrace(gvt->gt->uncore, _MMIO(offset));    in mmio_diff_handler()
    87  struct intel_gvt *gvt = vgpu->gvt;    in vgpu_mmio_diff_show() local
    97  mutex_lock(&gvt->lock);    in vgpu_mmio_diff_show()
    98  spin_lock_bh(&gvt->scheduler.mmio_context_lock);    in vgpu_mmio_diff_show()
   100  mmio_hw_access_pre(gvt->gt);    in vgpu_mmio_diff_show()
   102  intel_gvt_for_each_tracked_mmio(gvt, mmio_diff_handler, &param);    in vgpu_mmio_diff_show()
   103  mmio_hw_access_post(gvt->gt);    in vgpu_mmio_diff_show()
   105  spin_unlock_bh(&gvt->scheduler.mmio_context_lock);    in vgpu_mmio_diff_show()
   106  mutex_unlock(&gvt->lock);    in vgpu_mmio_diff_show()
   [all …]

firmware.c
    69  static int expose_firmware_sysfs(struct intel_gvt *gvt)    in expose_firmware_sysfs() argument
    71  struct intel_gvt_device_info *info = &gvt->device_info;    in expose_firmware_sysfs()
    72  struct drm_i915_private *i915 = gvt->gt->i915;    in expose_firmware_sysfs()
    96  memcpy(gvt->firmware.cfg_space, i915->vgpu.initial_cfg_space,    in expose_firmware_sysfs()
    98  memcpy(p, gvt->firmware.cfg_space, info->cfg_space_size);    in expose_firmware_sysfs()
   102  memcpy(gvt->firmware.mmio, i915->vgpu.initial_mmio,    in expose_firmware_sysfs()
   105  memcpy(p, gvt->firmware.mmio, info->mmio_size);    in expose_firmware_sysfs()
   121  static void clean_firmware_sysfs(struct intel_gvt *gvt)    in clean_firmware_sysfs() argument
   123  struct pci_dev *pdev = to_pci_dev(gvt->gt->i915->drm.dev);    in clean_firmware_sysfs()
   134  void intel_gvt_free_firmware(struct intel_gvt *gvt)    in intel_gvt_free_firmware() argument
   [all …]

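expose_firmware_sysfs() snapshots the device's initial PCI config space and MMIO state into one blob, one memcpy per region at its offset. A compact model of that packing; the region sizes here are invented stand-ins for gvt->device_info:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Invented sizes; the kernel reads these from gvt->device_info. */
#define CFG_SPACE_SIZE 256u
#define MMIO_SIZE      1024u

int main(void)
{
	uint8_t initial_cfg[CFG_SPACE_SIZE] = { 0x86, 0x80 }; /* vendor 8086 */
	uint8_t initial_mmio[MMIO_SIZE] = { 0 };
	uint8_t *blob = malloc(CFG_SPACE_SIZE + MMIO_SIZE);

	if (!blob)
		return 1;
	/* Same two-step copy shape as expose_firmware_sysfs(). */
	memcpy(blob, initial_cfg, CFG_SPACE_SIZE);
	memcpy(blob + CFG_SPACE_SIZE, initial_mmio, MMIO_SIZE);

	printf("blob[0..1] = %02x %02x\n", blob[0], blob[1]);
	free(blob);
	return 0;
}
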
kvmgt.c
   180  struct device *dev = vgpu->gvt->gt->i915->drm.dev;    in gvt_dma_map_page()
   203  struct device *dev = vgpu->gvt->gt->i915->drm.dev;    in gvt_dma_unmap_page()
   639  mutex_lock(&vgpu->gvt->lock);    in __kvmgt_vgpu_exist()
   640  for_each_active_vgpu(vgpu->gvt, itr, id) {    in __kvmgt_vgpu_exist()
   650  mutex_unlock(&vgpu->gvt->lock);    in __kvmgt_vgpu_exist()
   775  aperture_va = io_mapping_map_wc(&vgpu->gvt->gt->ggtt->iomap,    in intel_vgpu_aperture_rw()
   842  struct intel_gvt *gvt = vgpu->gvt;    in gtt_entry() local
   852  return (offset >= gvt->device_info.gtt_start_offset &&    in gtt_entry()
   853  offset < gvt->device_info.gtt_start_offset + gvt_ggtt_sz(gvt)) ?    in gtt_entry()
  1038  pgoff = (gvt_aperture_pa_base(vgpu->gvt) >> PAGE_SHIFT) + pgoff;    in intel_vgpu_mmap()
  [all …]

gtt.c
    76  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in intel_gvt_ggtt_gmadr_g2h()
    94  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in intel_gvt_ggtt_gmadr_h2g()
    96  if (drm_WARN(&i915->drm, !gvt_gmadr_is_valid(vgpu->gvt, h_addr),    in intel_gvt_ggtt_gmadr_h2g()
   100  if (gvt_gmadr_is_aperture(vgpu->gvt, h_addr))    in intel_gvt_ggtt_gmadr_h2g()
   102  + (h_addr - gvt_aperture_gmadr_base(vgpu->gvt));    in intel_gvt_ggtt_gmadr_h2g()
   105  + (h_addr - gvt_hidden_gmadr_base(vgpu->gvt));    in intel_gvt_ggtt_gmadr_h2g()
   305  const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;    in gtt_get_entry64()
   318  e->val64 = read_pte64(vgpu->gvt->gt->ggtt, index);    in gtt_get_entry64()
   330  const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;    in gtt_set_entry64()
   343  write_pte64(vgpu->gvt->gt->ggtt, index, e->val64);    in gtt_set_entry64()
   [all …]

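intel_gvt_ggtt_gmadr_h2g() validates a host graphics memory address, then rebases it into the guest's view of whichever window (aperture or hidden) it falls in. A sketch of that translation; the window bounds and per-vGPU bases below are invented example values:

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

/* Invented host windows and per-vGPU guest bases for illustration. */
static const uint64_t h_ap_base = 0x00000000, h_ap_end = 0x10000000;
static const uint64_t h_hi_base = 0x10000000, h_hi_end = 0x80000000;
static const uint64_t vgpu_ap_base = 0x02000000;
static const uint64_t vgpu_hi_base = 0x20000000;

static bool gmadr_is_aperture(uint64_t h) { return h >= h_ap_base && h < h_ap_end; }
static bool gmadr_is_valid(uint64_t h)    { return h < h_hi_end; }

/* Same shape as intel_gvt_ggtt_gmadr_h2g(): rebase into the vGPU view. */
static int h2g(uint64_t h_addr, uint64_t *g_addr)
{
	if (!gmadr_is_valid(h_addr))
		return -1;
	if (gmadr_is_aperture(h_addr))
		*g_addr = vgpu_ap_base + (h_addr - h_ap_base);
	else
		*g_addr = vgpu_hi_base + (h_addr - h_hi_base);
	return 0;
}

int main(void)
{
	uint64_t g;

	if (!h2g(0x1000, &g))
		printf("host %#x -> guest %#llx\n", 0x1000u,
		       (unsigned long long)g);
	return 0;
}
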
mmio.h
    73  intel_gvt_render_mmio_to_engine(struct intel_gvt *gvt, unsigned int reg);
    74  unsigned long intel_gvt_get_device_type(struct intel_gvt *gvt);
    76  int intel_gvt_setup_mmio_info(struct intel_gvt *gvt);
    77  void intel_gvt_clean_mmio_info(struct intel_gvt *gvt);
    78  int intel_gvt_for_each_tracked_mmio(struct intel_gvt *gvt,
    79  int (*handler)(struct intel_gvt *gvt, u32 offset, void *data),
    82  struct intel_gvt_mmio_info *intel_gvt_find_mmio_info(struct intel_gvt *gvt,
   101  bool intel_gvt_in_force_nonpriv_whitelist(struct intel_gvt *gvt,
   110  void intel_gvt_restore_fence(struct intel_gvt *gvt);
   111  void intel_gvt_restore_mmio(struct intel_gvt *gvt);

interrupt.c
   150  struct intel_gvt *gvt,    in regbase_to_irq_info() argument
   153  struct intel_gvt_irq *irq = &gvt->irq;    in regbase_to_irq_info()
   181  struct intel_gvt *gvt = vgpu->gvt;    in intel_vgpu_reg_imr_handler() local
   182  const struct intel_gvt_irq_ops *ops = gvt->irq.ops;    in intel_vgpu_reg_imr_handler()
   211  struct intel_gvt *gvt = vgpu->gvt;    in intel_vgpu_reg_master_irq_handler() local
   212  const struct intel_gvt_irq_ops *ops = gvt->irq.ops;    in intel_vgpu_reg_master_irq_handler()
   250  struct intel_gvt *gvt = vgpu->gvt;    in intel_vgpu_reg_ier_handler() local
   251  struct drm_i915_private *i915 = gvt->gt->i915;    in intel_vgpu_reg_ier_handler()
   252  const struct intel_gvt_irq_ops *ops = gvt->irq.ops;    in intel_vgpu_reg_ier_handler()
   261  info = regbase_to_irq_info(gvt, ier_to_regbase(reg));    in intel_vgpu_reg_ier_handler()
   [all …]

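The IMR/IER handlers route a register write to its per-group IRQ info via regbase_to_irq_info() and then apply the platform's irq ops. As a reminder of the IMR semantics they emulate (a set bit masks the interrupt, so newly cleared bits may expose pending events), a toy model with invented state:

#include <stdint.h>
#include <stdio.h>

/* Toy IMR emulation: a write updates the mask; newly unmasked bits
 * matter because pending events must then be re-evaluated. */
struct toy_irq_state { uint32_t imr; };

static void imr_write(struct toy_irq_state *s, uint32_t val)
{
	uint32_t changed = s->imr ^ val;

	s->imr = val;
	if (changed & ~val)  /* some bits were just unmasked */
		printf("unmasked bits %#x, re-evaluate pending irqs\n",
		       changed & ~val);
}

int main(void)
{
	struct toy_irq_state s = { .imr = 0xffffffffu }; /* all masked */

	imr_write(&s, 0xfffffffeu);                      /* unmask bit 0 */
	return 0;
}
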
sched_policy.h
    41  int (*init)(struct intel_gvt *gvt);
    42  void (*clean)(struct intel_gvt *gvt);
    49  void intel_gvt_schedule(struct intel_gvt *gvt);
    51  int intel_gvt_init_sched_policy(struct intel_gvt *gvt);
    53  void intel_gvt_clean_sched_policy(struct intel_gvt *gvt);
    63  void intel_gvt_kick_schedule(struct intel_gvt *gvt);

handlers.c
    63  unsigned long intel_gvt_get_device_type(struct intel_gvt *gvt)    in intel_gvt_get_device_type() argument
    65  struct drm_i915_private *i915 = gvt->gt->i915;    in intel_gvt_get_device_type()
    81  static bool intel_gvt_match_device(struct intel_gvt *gvt,    in intel_gvt_match_device() argument
    84  return intel_gvt_get_device_type(gvt) & device;    in intel_gvt_match_device()
    99  struct intel_gvt_mmio_info *intel_gvt_find_mmio_info(struct intel_gvt *gvt,    in intel_gvt_find_mmio_info() argument
   104  hash_for_each_possible(gvt->mmio.mmio_info_table, e, node, offset) {    in intel_gvt_find_mmio_info()
   111  static int setup_mmio_info(struct intel_gvt *gvt, u32 offset, u32 size,    in setup_mmio_info() argument
   118  if (!intel_gvt_match_device(gvt, device))    in setup_mmio_info()
   128  p = intel_gvt_find_mmio_info(gvt, i);    in setup_mmio_info()
   135  gvt->mmio.mmio_attribute[i / 4] = flags;    in setup_mmio_info()
   [all …]

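intel_gvt_find_mmio_info() is a chained hash lookup keyed on register offset: hash_for_each_possible() walks one bucket and the loop body compares offsets. The same pattern as a standalone sketch; the bucket count and hash function are invented, where the kernel uses its generic hashtable helpers:

#include <stdint.h>
#include <stdio.h>
#include <stddef.h>

#define NBUCKETS 64u

/* Simplified stand-in for intel_gvt_mmio_info. */
struct mmio_info {
	uint32_t offset;
	struct mmio_info *next;  /* bucket chain */
};

static struct mmio_info *table[NBUCKETS];

static void add_info(struct mmio_info *e)
{
	uint32_t b = (e->offset >> 2) % NBUCKETS;

	e->next = table[b];
	table[b] = e;
}

/* Same lookup shape as intel_gvt_find_mmio_info(): walk one bucket,
 * return the entry whose offset matches. */
static struct mmio_info *find_info(uint32_t offset)
{
	for (struct mmio_info *e = table[(offset >> 2) % NBUCKETS];
	     e; e = e->next)
		if (e->offset == offset)
			return e;
	return NULL;
}

int main(void)
{
	struct mmio_info a = { .offset = 0x2030 };

	add_info(&a);
	printf("0x2030 %s\n", find_info(0x2030) ? "found" : "missing");
	return 0;
}
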
mmio_context.c
   168  struct intel_gvt *gvt = engine->i915->gvt;    in load_render_mocs() local
   170  u32 cnt = gvt->engine_mmio_list.mocs_mmio_offset_list_cnt;    in load_render_mocs()
   171  u32 *regs = gvt->engine_mmio_list.mocs_mmio_offset_list;    in load_render_mocs()
   207  struct intel_gvt *gvt = vgpu->gvt;    in restore_context_mmio_for_inhibit() local
   209  int count = gvt->engine_mmio_list.ctx_mmio_count[ring_id];    in restore_context_mmio_for_inhibit()
   223  for (mmio = gvt->engine_mmio_list.mmio;    in restore_context_mmio_for_inhibit()
   358  u32 *regs = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list;    in handle_tlb_pending_event()
   359  u32 cnt = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list_cnt;    in handle_tlb_pending_event()
   485  for (mmio = engine->i915->gvt->engine_mmio_list.mmio;    in switch_mmio()
   584  void intel_gvt_init_engine_mmio_context(struct intel_gvt *gvt)    in intel_gvt_init_engine_mmio_context() argument
   [all …]

scheduler.c
    87  struct drm_i915_private *dev_priv = workload->vgpu->gvt->gt->i915;    in sr_oa_regs()
   130  struct intel_gvt *gvt = vgpu->gvt;    in populate_shadow_context() local
   218  if (IS_BROADWELL(gvt->gt->i915) && workload->engine->id == RCS0)    in populate_shadow_context()
   290  struct intel_gvt *gvt = container_of(nb, struct intel_gvt,    in shadow_context_status_change() local
   292  struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler;    in shadow_context_status_change()
   524  struct intel_gvt *gvt = workload->vgpu->gvt;    in prepare_shadow_batch_buffer() local
   525  const int gmadr_bytes = gvt->device_info.gmadr_bytes_in_cmd;    in prepare_shadow_batch_buffer()
   848  pick_next_workload(struct intel_gvt *gvt, struct intel_engine_cs *engine)    in pick_next_workload() argument
   850  struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler;    in pick_next_workload()
   853  mutex_lock(&gvt->sched_lock);    in pick_next_workload()
   [all …]

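pick_next_workload() takes the next workload for an engine from the scheduler state under gvt->sched_lock. A user-space sketch of that locked pop, using a pthread mutex and a single invented queue in place of the kernel's per-engine workload lists:

#include <pthread.h>
#include <stdio.h>
#include <stddef.h>

/* Toy workload queue; invented stand-in for the scheduler's lists. */
struct workload { int id; struct workload *next; };

static struct workload *queue_head;
static pthread_mutex_t sched_lock = PTHREAD_MUTEX_INITIALIZER;

/* Same shape as pick_next_workload(): take the lock, pop the head. */
static struct workload *pick_next_workload(void)
{
	struct workload *w;

	pthread_mutex_lock(&sched_lock);
	w = queue_head;
	if (w)
		queue_head = w->next;
	pthread_mutex_unlock(&sched_lock);
	return w;
}

int main(void)
{
	struct workload w2 = { 2, NULL }, w1 = { 1, &w2 };

	queue_head = &w1;
	printf("next workload: %d\n", pick_next_workload()->id);
	return 0;
}
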
cfg_space.c
   120  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in intel_vgpu_emulate_cfg_read()
   126  offset + bytes > vgpu->gvt->device_info.cfg_space_size))    in intel_vgpu_emulate_cfg_read()
   259  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in intel_vgpu_emulate_cfg_write()
   266  offset + bytes > vgpu->gvt->device_info.cfg_space_size))    in intel_vgpu_emulate_cfg_write()
   322  struct intel_gvt *gvt = vgpu->gvt;    in intel_vgpu_init_cfg_space() local
   323  struct pci_dev *pdev = to_pci_dev(gvt->gt->i915->drm.dev);    in intel_vgpu_init_cfg_space()
   324  const struct intel_gvt_device_info *info = &gvt->device_info;    in intel_vgpu_init_cfg_space()
   328  memcpy(vgpu_cfg_space(vgpu), gvt->firmware.cfg_space,    in intel_vgpu_init_cfg_space()
   343  gvt_aperture_pa_base(gvt), true);    in intel_vgpu_init_cfg_space()

cmd_parser.c
   519  (s->vgpu->gvt->device_info.gmadr_bytes_in_cmd >> 2)
   665  find_cmd_entry(struct intel_gvt *gvt, unsigned int opcode,    in find_cmd_entry() argument
   670  hash_for_each_possible(gvt->cmd_table, e, hlist, opcode) {    in find_cmd_entry()
   679  get_cmd_info(struct intel_gvt *gvt, u32 cmd,    in get_cmd_info() argument
   688  return find_cmd_entry(gvt, opcode, engine);    in get_cmd_info()
   895  struct intel_gvt *gvt = vgpu->gvt;    in cmd_reg_handler() local
   899  if (offset + 4 > gvt->device_info.mmio_size) {    in cmd_reg_handler()
   908  intel_gvt_mmio_set_cmd_accessible(gvt, offset);    in cmd_reg_handler()
   909  mmio_info = intel_gvt_find_mmio_info(gvt, offset);    in cmd_reg_handler()
   911  intel_gvt_mmio_set_cmd_write_patch(gvt, offset);    in cmd_reg_handler()
   [all …]

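cmd_reg_handler() first rejects any register offset whose 4-byte access would run past the MMIO window (the offset + 4 > mmio_size check at line 899) before marking the register command-accessible. That guard, sketched with an invented window size:

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

#define MMIO_SIZE (2u << 20)  /* invented window size */

/* Same guard as cmd_reg_handler(): a DWORD register access must fit
 * entirely inside the MMIO window. */
static bool cmd_reg_in_range(uint32_t offset)
{
	return offset + 4 <= MMIO_SIZE;
}

int main(void)
{
	printf("%#x: %s\n", 0x2030u,
	       cmd_reg_in_range(0x2030) ? "ok" : "reject");
	printf("%#x: %s\n", MMIO_SIZE - 2,
	       cmd_reg_in_range(MMIO_SIZE - 2) ? "ok" : "reject");
	return 0;
}
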
cmd_parser.h
    46  void intel_gvt_clean_cmd_parser(struct intel_gvt *gvt);
    48  int intel_gvt_init_cmd_parser(struct intel_gvt *gvt);

display.c
    64  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in edp_pipe_is_enabled()
    76  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in pipe_is_enabled()
   176  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in emulate_monitor_status_change()
   532  intel_gvt_request_service(vgpu->gvt,    in vblank_timer_fn()
   541  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in setup_virtual_dp_monitor()
   624  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in emulate_vblank_on_pipe()
   656  for_each_pipe(vgpu->gvt->gt->i915, pipe)    in intel_vgpu_emulate_vblank()
   671  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in intel_vgpu_emulate_hotplug()
   760  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in intel_vgpu_clean_display()
   786  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in intel_vgpu_init_display()

gtt.h
   226  int intel_gvt_init_gtt(struct intel_gvt *gvt);
   227  void intel_gvt_clean_gtt(struct intel_gvt *gvt);
   294  void intel_gvt_restore_ggtt(struct intel_gvt *gvt);

scheduler.h
   141  int intel_gvt_init_workload_scheduler(struct intel_gvt *gvt);
   143  void intel_gvt_clean_workload_scheduler(struct intel_gvt *gvt);

edid.c
   141  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in gmbus0_mmio_write()
   282  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in gmbus3_mmio_write()
   379  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in intel_gvt_i2c_handle_gmbus_read()
   409  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in intel_gvt_i2c_handle_gmbus_write()
   481  struct drm_i915_private *i915 = vgpu->gvt->gt->i915;    in intel_gvt_i2c_handle_aux_ch_write()

mmio_context.h
    63  void intel_gvt_init_engine_mmio_context(struct intel_gvt *gvt);

fb_decoder.c
   150  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in intel_vgpu_get_stride()
   206  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in intel_vgpu_decode_primary_plane()
   336  struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915;    in intel_vgpu_decode_cursor_plane()