
Searched for refs:dpu_kms (results 1 – 20 of 20), sorted by relevance

/linux-5.19.10/drivers/gpu/drm/msm/disp/dpu1/
dpu_kms.c
53 static void _dpu_kms_mmu_destroy(struct dpu_kms *dpu_kms);
59 struct dpu_kms *kms = (struct dpu_kms *)s->private; in _dpu_danger_signal_status()
109 struct dpu_kms *kms = file->private_data; in _dpu_plane_danger_read()
118 static void _dpu_plane_set_danger_state(struct dpu_kms *kms, bool enable) in _dpu_plane_set_danger_state()
144 struct dpu_kms *kms = file->private_data; in _dpu_plane_danger_write()
173 static void dpu_debugfs_danger_init(struct dpu_kms *dpu_kms, in dpu_debugfs_danger_init() argument
179 dpu_kms, &dpu_debugfs_danger_stats_fops); in dpu_debugfs_danger_init()
181 dpu_kms, &dpu_debugfs_safe_stats_fops); in dpu_debugfs_danger_init()
183 dpu_kms, &dpu_plane_danger_enable); in dpu_debugfs_danger_init()
193 struct dpu_kms *dpu_kms; member
[all …]
dpu_hw_interrupts.c
152 static void dpu_core_irq_callback_handler(struct dpu_kms *dpu_kms, int irq_idx) in dpu_core_irq_callback_handler() argument
156 if (!dpu_kms->hw_intr->irq_tbl[irq_idx].cb) in dpu_core_irq_callback_handler()
159 atomic_inc(&dpu_kms->hw_intr->irq_tbl[irq_idx].count); in dpu_core_irq_callback_handler()
164 dpu_kms->hw_intr->irq_tbl[irq_idx].cb(dpu_kms->hw_intr->irq_tbl[irq_idx].arg, irq_idx); in dpu_core_irq_callback_handler()
169 struct dpu_kms *dpu_kms = to_dpu_kms(kms); in dpu_core_irq() local
170 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_core_irq()
209 dpu_core_irq_callback_handler(dpu_kms, irq_idx); in dpu_core_irq()
326 static void dpu_clear_irqs(struct dpu_kms *dpu_kms) in dpu_clear_irqs() argument
328 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_clear_irqs()
344 static void dpu_disable_all_irqs(struct dpu_kms *dpu_kms) in dpu_disable_all_irqs() argument
[all …]
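
The dpu_hw_interrupts.c hits above show a per-IRQ callback table: dpu_core_irq_callback_handler() returns early when no callback is registered for irq_idx, bumps an atomic per-IRQ counter, and then invokes cb(arg, irq_idx) from the table slot. A minimal userspace sketch of that dispatch shape, using simplified stand-in types rather than the kernel structures:

    #include <stdatomic.h>
    #include <stdio.h>

    /* Simplified stand-in for one irq_tbl[] slot (not the kernel structures). */
    struct irq_slot {
            void (*cb)(void *arg, int irq_idx);  /* registered callback, or NULL */
            void *arg;                           /* opaque argument handed back to cb */
            atomic_int count;                    /* how many times this IRQ fired */
    };

    /* Mirrors the shape of dpu_core_irq_callback_handler(): bail out when no
     * callback is registered, bump the counter, then call cb(arg, irq_idx). */
    static void irq_dispatch(struct irq_slot *tbl, int irq_idx)
    {
            if (!tbl[irq_idx].cb)
                    return;

            atomic_fetch_add(&tbl[irq_idx].count, 1);
            tbl[irq_idx].cb(tbl[irq_idx].arg, irq_idx);
    }

    static void demo_cb(void *arg, int irq_idx)
    {
            printf("%s: irq %d\n", (const char *)arg, irq_idx);
    }

    int main(void)
    {
            struct irq_slot tbl[2] = {
                    { NULL, NULL, 0 },         /* slot 0: nothing registered */
                    { demo_cb, "vblank", 0 },  /* slot 1: demo callback */
            };

            irq_dispatch(tbl, 0);   /* silently ignored */
            irq_dispatch(tbl, 1);   /* prints "vblank: irq 1" */
            return 0;
    }
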
dpu_vbif.h
50 void dpu_vbif_set_ot_limit(struct dpu_kms *dpu_kms,
58 void dpu_vbif_set_qos_remap(struct dpu_kms *dpu_kms,
65 void dpu_vbif_clear_errors(struct dpu_kms *dpu_kms);
71 void dpu_vbif_init_memtypes(struct dpu_kms *dpu_kms);
73 void dpu_debugfs_vbif_init(struct dpu_kms *dpu_kms, struct dentry *debugfs_root);
dpu_vbif.c
148 void dpu_vbif_set_ot_limit(struct dpu_kms *dpu_kms, in dpu_vbif_set_ot_limit() argument
157 mdp = dpu_kms->hw_mdp; in dpu_vbif_set_ot_limit()
159 for (i = 0; i < ARRAY_SIZE(dpu_kms->hw_vbif); i++) { in dpu_vbif_set_ot_limit()
160 if (dpu_kms->hw_vbif[i] && in dpu_vbif_set_ot_limit()
161 dpu_kms->hw_vbif[i]->idx == params->vbif_idx) in dpu_vbif_set_ot_limit()
162 vbif = dpu_kms->hw_vbif[i]; in dpu_vbif_set_ot_limit()
204 void dpu_vbif_set_qos_remap(struct dpu_kms *dpu_kms, in dpu_vbif_set_qos_remap() argument
213 if (!params || !dpu_kms->hw_mdp) { in dpu_vbif_set_qos_remap()
217 mdp = dpu_kms->hw_mdp; in dpu_vbif_set_qos_remap()
219 for (i = 0; i < ARRAY_SIZE(dpu_kms->hw_vbif); i++) { in dpu_vbif_set_qos_remap()
[all …]
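
Both dpu_vbif_set_ot_limit() and dpu_vbif_set_qos_remap() resolve the target VBIF block the same way: a guarded linear scan of the hw_vbif[] pointer array for the entry whose idx matches the caller's vbif_idx. A simplified sketch of that lookup, with a hypothetical NUM_VBIF and a stand-in struct hw_vbif in place of the driver's real definitions:

    #include <stddef.h>

    /* Hypothetical table size; the driver iterates ARRAY_SIZE(dpu_kms->hw_vbif). */
    #define NUM_VBIF 4

    /* Stand-in for the driver's VBIF block handle. */
    struct hw_vbif {
            int idx;        /* block id compared against the caller's vbif_idx */
    };

    /* Guarded linear scan over a sparsely populated pointer array, matching the
     * lookup shape seen in dpu_vbif_set_ot_limit() / dpu_vbif_set_qos_remap(). */
    static struct hw_vbif *find_vbif(struct hw_vbif *tbl[NUM_VBIF], int vbif_idx)
    {
            struct hw_vbif *vbif = NULL;
            int i;

            for (i = 0; i < NUM_VBIF; i++) {
                    if (tbl[i] && tbl[i]->idx == vbif_idx)
                            vbif = tbl[i];
            }

            return vbif;    /* NULL when no populated entry matched */
    }

    int main(void)
    {
            struct hw_vbif rt = { .idx = 3 };
            struct hw_vbif *tbl[NUM_VBIF] = { NULL, &rt, NULL, NULL };

            return find_vbif(tbl, 3) == &rt ? 0 : 1;    /* exercises the scan */
    }
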
dpu_core_irq.h
39 struct dpu_kms *dpu_kms,
54 struct dpu_kms *dpu_kms,
69 struct dpu_kms *dpu_kms,
77 void dpu_debugfs_core_irq_init(struct dpu_kms *dpu_kms,
dpu_kms.h
68 struct dpu_kms { struct
122 #define to_dpu_kms(x) container_of(x, struct dpu_kms, base) argument
140 *dpu_kms_get_existing_global_state(struct dpu_kms *dpu_kms);
171 uint32_t offset, uint32_t length, struct dpu_kms *dpu_kms);
183 void *dpu_debugfs_get_root(struct dpu_kms *dpu_kms);
205 u64 dpu_kms_get_clk_rate(struct dpu_kms *dpu_kms, char *clock_name);
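
The to_dpu_kms() macro at dpu_kms.h line 122 is the usual container_of() downcast: struct dpu_kms embeds the generic KMS object as its base member, so generic code passes around a pointer to base and driver code recovers the enclosing dpu_kms from it. A self-contained sketch of the same idiom, with stand-in struct definitions in place of the real ones:

    #include <stddef.h>
    #include <stdio.h>

    /* Stand-in definitions; the real structures live in msm_kms.h / dpu_kms.h. */
    struct msm_kms { int id; };

    struct dpu_kms {
            struct msm_kms base;    /* embedded generic KMS object */
            int private_state;      /* placeholder for driver-private fields */
    };

    /* Classic container_of(): step back from a member to its enclosing struct. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    #define to_dpu_kms(x) container_of(x, struct dpu_kms, base)

    int main(void)
    {
            struct dpu_kms dpu = { .base = { .id = 1 }, .private_state = 42 };
            struct msm_kms *kms = &dpu.base;    /* what generic code hands around */

            /* Recover the driver object from the generic pointer. */
            printf("%d\n", to_dpu_kms(kms)->private_state);  /* prints 42 */
            return 0;
    }
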
dpu_encoder_phys_cmd.c
196 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in _dpu_encoder_phys_cmd_handle_ppdone_timeout()
265 ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_control_vblank_irq()
270 ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_control_vblank_irq()
292 dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
296 dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
303 dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
309 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
312 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
315 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
329 struct dpu_kms *dpu_kms; in dpu_encoder_phys_cmd_tearcheck_config() local
[all …]
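
The dpu_encoder_phys_cmd.c hits show how the command-mode encoder's irq_control path keeps registration balanced: the enable branch calls dpu_core_irq_register_callback() once per interrupt the encoder uses, and the disable branch unregisters exactly the same set. A sketch of that enable/disable symmetry, with hypothetical IRQ index names and stub helpers in place of the real callback API:

    /* Hypothetical interrupt indices and helpers, for illustration only. */
    enum cmd_irq { IRQ_PP_DONE, IRQ_RDPTR, IRQ_UNDERRUN, IRQ_CMD_MAX };

    struct cmd_encoder {
            int irq[IRQ_CMD_MAX];   /* per-encoder interrupt indices */
    };

    /* Stand-ins for dpu_core_irq_register_callback()/_unregister_callback(). */
    static int register_irq_cb(int irq_idx)   { (void)irq_idx; return 0; }
    static int unregister_irq_cb(int irq_idx) { (void)irq_idx; return 0; }

    /* The enable path registers one callback per interrupt the encoder uses;
     * the disable path unregisters exactly the same set. */
    static void cmd_encoder_irq_control(struct cmd_encoder *enc, int enable)
    {
            if (enable) {
                    register_irq_cb(enc->irq[IRQ_PP_DONE]);
                    register_irq_cb(enc->irq[IRQ_RDPTR]);
                    register_irq_cb(enc->irq[IRQ_UNDERRUN]);
            } else {
                    unregister_irq_cb(enc->irq[IRQ_PP_DONE]);
                    unregister_irq_cb(enc->irq[IRQ_RDPTR]);
                    unregister_irq_cb(enc->irq[IRQ_UNDERRUN]);
            }
    }

    int main(void)
    {
            struct cmd_encoder enc = { { 10, 11, 12 } };

            cmd_encoder_irq_control(&enc, 1);   /* enable: register all three */
            cmd_encoder_irq_control(&enc, 0);   /* disable: unregister the same three */
            return 0;
    }
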
dpu_core_perf.c
40 static u64 _dpu_core_perf_calc_bw(struct dpu_kms *kms, in _dpu_core_perf_calc_bw()
72 static u64 _dpu_core_perf_calc_clk(struct dpu_kms *kms, in _dpu_core_perf_calc_clk()
102 static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc) in _dpu_crtc_get_kms()
109 static void _dpu_core_perf_calc_crtc(struct dpu_kms *kms, in _dpu_core_perf_calc_crtc()
149 struct dpu_kms *kms; in dpu_core_perf_crtc_check()
209 static int _dpu_core_perf_crtc_update_bus(struct dpu_kms *kms, in _dpu_core_perf_crtc_update_bus()
260 struct dpu_kms *kms; in dpu_core_perf_crtc_release_bw()
287 static u64 _dpu_core_perf_get_core_clk_rate(struct dpu_kms *kms) in _dpu_core_perf_get_core_clk_rate()
319 struct dpu_kms *kms; in dpu_core_perf_crtc_update()
468 int dpu_core_perf_debugfs_init(struct dpu_kms *dpu_kms, struct dentry *parent) in dpu_core_perf_debugfs_init() argument
[all …]
dpu_encoder.c
327 irq_status = dpu_core_irq_read(phys_enc->dpu_kms, irq); in dpu_encoder_helper_wait_for_irq()
483 struct dpu_kms *dpu_kms, in dpu_encoder_get_topology() argument
507 else if (!dpu_kms->catalog->caps->has_3d_merge) in dpu_encoder_get_topology()
513 if (dpu_kms->catalog->dspp && in dpu_encoder_get_topology()
514 (dpu_kms->catalog->dspp_count >= topology.num_lm)) in dpu_encoder_get_topology()
543 struct dpu_kms *dpu_kms; in dpu_encoder_virt_atomic_check() local
560 dpu_kms = to_dpu_kms(priv->kms); in dpu_encoder_virt_atomic_check()
582 topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode); in dpu_encoder_virt_atomic_check()
594 ret = dpu_rm_reserve(&dpu_kms->rm, global_state, in dpu_encoder_virt_atomic_check()
609 struct dpu_kms *dpu_kms; in _dpu_encoder_update_vsync_source() local
[all …]
dpu_encoder_phys_wb.c
56 dpu_vbif_set_ot_limit(phys_enc->dpu_kms, &ot_params); in dpu_encoder_phys_wb_set_ot_limit()
93 dpu_vbif_set_qos_remap(phys_enc->dpu_kms, &qos_params); in dpu_encoder_phys_wb_set_qos_remap()
107 if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) { in dpu_encoder_phys_wb_set_qos()
112 catalog = phys_enc->dpu_kms->catalog; in dpu_encoder_phys_wb_set_qos()
144 if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) { in dpu_encoder_phys_wb_setup_fb()
167 cdp_cfg.enable = phys_enc->dpu_kms->catalog->perf.cdp_cfg in dpu_encoder_phys_wb_setup_fb()
408 dpu_core_irq_register_callback(phys->dpu_kms, in dpu_encoder_phys_wb_irq_ctrl()
412 dpu_core_irq_unregister_callback(phys->dpu_kms, phys->irq[INTR_IDX_WB_DONE]); in dpu_encoder_phys_wb_irq_ctrl()
597 aspace = phys_enc->dpu_kms->base.aspace; in dpu_encoder_phys_wb_prepare_wb_job()
651 aspace = phys_enc->dpu_kms->base.aspace; in dpu_encoder_phys_wb_cleanup_wb_job()
[all …]
dpu_core_perf.h
125 struct dpu_kms;
132 int dpu_core_perf_debugfs_init(struct dpu_kms *dpu_kms, struct dentry *parent);
dpu_writeback.c
12 struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms); in dpu_wb_conn_get_modes() local
21 return drm_add_modes_noedid(connector, dpu_kms->catalog->caps->max_mixer_width, in dpu_wb_conn_get_modes()
dpu_hw_sspp.h
374 struct dpu_kms;
394 void dpu_debugfs_sspp_init(struct dpu_kms *dpu_kms, struct dentry *debugfs_root);
395 int _dpu_hw_sspp_init_debugfs(struct dpu_hw_pipe *hw_pipe, struct dpu_kms *kms, struct dentry *entr…
dpu_encoder_phys.h
208 struct dpu_kms *dpu_kms; member
284 struct dpu_kms *dpu_kms; member
dpu_plane.c
120 static struct dpu_kms *_dpu_plane_get_kms(struct drm_plane *plane) in _dpu_plane_get_kms()
143 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_calc_bw() local
163 hw_latency_lines = dpu_kms->catalog->perf.min_prefill_lines; in _dpu_plane_calc_bw()
436 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_set_ot_limit() local
449 dpu_vbif_set_ot_limit(dpu_kms, &ot_params); in _dpu_plane_set_ot_limit()
460 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_set_qos_remap() local
475 dpu_vbif_set_qos_remap(dpu_kms, &qos_params); in _dpu_plane_set_qos_remap()
484 struct dpu_kms *kms = _dpu_plane_get_kms(&pdpu->base); in _dpu_plane_set_scanout()
889 struct dpu_kms *kms = _dpu_plane_get_kms(&pdpu->base); in dpu_plane_prepare_fb()
1431 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in dpu_plane_danger_signal_ctrl() local
[all …]
dpu_encoder_phys_vid.c
386 ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_control_vblank_irq()
391 ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_control_vblank_irq()
519 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_prepare_for_kickoff()
609 dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_irq_control()
615 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_irq_control()
693 phys_enc->hw_mdptop = p->dpu_kms->hw_mdp; in dpu_encoder_phys_vid_init()
701 phys_enc->dpu_kms = p->dpu_kms; in dpu_encoder_phys_vid_init()
dpu_crtc.h
281 int dpu_crtc_register_custom_event(struct dpu_kms *kms,
dpu_crtc.c
43 static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc) in _dpu_crtc_get_kms()
863 struct dpu_kms *dpu_kms = _dpu_crtc_get_kms(crtc); in dpu_crtc_commit_kickoff() local
899 dpu_vbif_clear_errors(dpu_kms); in dpu_crtc_commit_kickoff()
dpu_hw_sspp.c
698 int _dpu_hw_sspp_init_debugfs(struct dpu_hw_pipe *hw_pipe, struct dpu_kms *kms, struct dentry *entr… in _dpu_hw_sspp_init_debugfs()
/linux-5.19.10/drivers/gpu/drm/msm/
Makefile
79 disp/dpu1/dpu_kms.o \