
Searched refs:dpu_kms (Results 1 – 20 of 20) sorted by relevance

/linux-6.1.9/drivers/gpu/drm/msm/disp/dpu1/
dpu_kms.c
54 static void _dpu_kms_mmu_destroy(struct dpu_kms *dpu_kms);
60 struct dpu_kms *kms = (struct dpu_kms *)s->private; in _dpu_danger_signal_status()
110 struct dpu_kms *kms = file->private_data; in _dpu_plane_danger_read()
119 static void _dpu_plane_set_danger_state(struct dpu_kms *kms, bool enable) in _dpu_plane_set_danger_state()
145 struct dpu_kms *kms = file->private_data; in _dpu_plane_danger_write()
174 static void dpu_debugfs_danger_init(struct dpu_kms *dpu_kms, in dpu_debugfs_danger_init() argument
180 dpu_kms, &dpu_debugfs_danger_stats_fops); in dpu_debugfs_danger_init()
182 dpu_kms, &dpu_debugfs_safe_stats_fops); in dpu_debugfs_danger_init()
184 dpu_kms, &dpu_plane_danger_enable); in dpu_debugfs_danger_init()
194 struct dpu_kms *dpu_kms; member
[all …]
dpu_hw_interrupts.c
152 static void dpu_core_irq_callback_handler(struct dpu_kms *dpu_kms, int irq_idx) in dpu_core_irq_callback_handler() argument
156 if (!dpu_kms->hw_intr->irq_tbl[irq_idx].cb) in dpu_core_irq_callback_handler()
159 atomic_inc(&dpu_kms->hw_intr->irq_tbl[irq_idx].count); in dpu_core_irq_callback_handler()
164 dpu_kms->hw_intr->irq_tbl[irq_idx].cb(dpu_kms->hw_intr->irq_tbl[irq_idx].arg, irq_idx); in dpu_core_irq_callback_handler()
169 struct dpu_kms *dpu_kms = to_dpu_kms(kms); in dpu_core_irq() local
170 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_core_irq()
209 dpu_core_irq_callback_handler(dpu_kms, irq_idx); in dpu_core_irq()
326 static void dpu_clear_irqs(struct dpu_kms *dpu_kms) in dpu_clear_irqs() argument
328 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_clear_irqs()
344 static void dpu_disable_all_irqs(struct dpu_kms *dpu_kms) in dpu_disable_all_irqs() argument
[all …]
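The dpu_hw_interrupts.c hits above outline a per-index dispatch table: each interrupt slot holds an optional callback, an opaque argument, and an atomic hit counter, and dpu_core_irq_callback_handler() skips empty slots, bumps the counter, then invokes the callback. A minimal user-space sketch of that shape follows; the slot layout, names, and table size are illustrative assumptions rather than the driver's actual structures, and no locking is shown.

#include <stdatomic.h>
#include <stdio.h>

#define NUM_IRQS 8

/* One slot per interrupt index: optional callback, its argument, and a
 * counter of how many times that interrupt has been dispatched. */
struct irq_slot {
	void (*cb)(void *arg, int irq_idx);
	void *arg;
	atomic_int count;
};

static struct irq_slot irq_tbl[NUM_IRQS];

/* Mirrors the handler pattern above: ignore unregistered slots, count the
 * hit, then call the registered callback with its argument. */
static void irq_callback_handler(int irq_idx)
{
	if (!irq_tbl[irq_idx].cb)
		return;

	atomic_fetch_add(&irq_tbl[irq_idx].count, 1);
	irq_tbl[irq_idx].cb(irq_tbl[irq_idx].arg, irq_idx);
}

static void vblank_cb(void *arg, int irq_idx)
{
	printf("irq %d fired for %s\n", irq_idx, (const char *)arg);
}

int main(void)
{
	irq_tbl[3].cb = vblank_cb;
	irq_tbl[3].arg = "encoder0";

	irq_callback_handler(3);	/* dispatched to vblank_cb */
	irq_callback_handler(5);	/* no callback registered: silently skipped */
	return 0;
}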
dpu_vbif.h
50 void dpu_vbif_set_ot_limit(struct dpu_kms *dpu_kms,
58 void dpu_vbif_set_qos_remap(struct dpu_kms *dpu_kms,
65 void dpu_vbif_clear_errors(struct dpu_kms *dpu_kms);
71 void dpu_vbif_init_memtypes(struct dpu_kms *dpu_kms);
73 void dpu_debugfs_vbif_init(struct dpu_kms *dpu_kms, struct dentry *debugfs_root);
dpu_core_irq.h
39 struct dpu_kms *dpu_kms,
54 struct dpu_kms *dpu_kms,
69 struct dpu_kms *dpu_kms,
77 void dpu_debugfs_core_irq_init(struct dpu_kms *dpu_kms,
dpu_vbif.c
14 static struct dpu_hw_vbif *dpu_get_vbif(struct dpu_kms *dpu_kms, enum dpu_vbif vbif_idx) in dpu_get_vbif() argument
16 if (vbif_idx < ARRAY_SIZE(dpu_kms->hw_vbif)) in dpu_get_vbif()
17 return dpu_kms->hw_vbif[vbif_idx]; in dpu_get_vbif()
168 void dpu_vbif_set_ot_limit(struct dpu_kms *dpu_kms, in dpu_vbif_set_ot_limit() argument
177 mdp = dpu_kms->hw_mdp; in dpu_vbif_set_ot_limit()
179 vbif = dpu_get_vbif(dpu_kms, params->vbif_idx); in dpu_vbif_set_ot_limit()
219 void dpu_vbif_set_qos_remap(struct dpu_kms *dpu_kms, in dpu_vbif_set_qos_remap() argument
228 if (!params || !dpu_kms->hw_mdp) { in dpu_vbif_set_qos_remap()
232 mdp = dpu_kms->hw_mdp; in dpu_vbif_set_qos_remap()
234 vbif = dpu_get_vbif(dpu_kms, params->vbif_idx); in dpu_vbif_set_qos_remap()
[all …]
dpu_kms.h
68 struct dpu_kms { struct
122 #define to_dpu_kms(x) container_of(x, struct dpu_kms, base) argument
140 *dpu_kms_get_existing_global_state(struct dpu_kms *dpu_kms);
171 uint32_t offset, uint32_t length, struct dpu_kms *dpu_kms);
183 void *dpu_debugfs_get_root(struct dpu_kms *dpu_kms);
205 u64 dpu_kms_get_clk_rate(struct dpu_kms *dpu_kms, char *clock_name);
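The dpu_kms.h hits above show the wrapper idiom used throughout the driver: struct dpu_kms embeds a base object, and to_dpu_kms(x) recovers the outer structure from a pointer to that base member via container_of(). A minimal, self-contained sketch of the same idiom follows; the stand-in struct names and fields are hypothetical, not the driver's real definitions.

#include <stddef.h>
#include <stdio.h>

/* container_of(): recover the enclosing structure from a pointer to one of
 * its members -- the same idiom behind to_dpu_kms(x) above. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct base_kms {			/* stand-in for the embedded base object */
	int id;
};

struct wrapped_kms {			/* stand-in for struct dpu_kms */
	struct base_kms base;		/* embedded base, as the macro implies */
	int catalog_id;
};

#define to_wrapped_kms(x) container_of(x, struct wrapped_kms, base)

int main(void)
{
	struct wrapped_kms dpu = { .base = { .id = 1 }, .catalog_id = 42 };
	struct base_kms *kms = &dpu.base;		/* what generic code holds */
	struct wrapped_kms *back = to_wrapped_kms(kms);	/* driver recovers itself */

	printf("catalog_id = %d\n", back->catalog_id);	/* prints 42 */
	return 0;
}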
dpu_encoder_phys_cmd.c
196 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in _dpu_encoder_phys_cmd_handle_ppdone_timeout()
265 ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_control_vblank_irq()
270 ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_control_vblank_irq()
292 dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
296 dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
303 dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
309 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
312 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
315 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_cmd_irq_control()
329 struct dpu_kms *dpu_kms; in dpu_encoder_phys_cmd_tearcheck_config() local
[all …]
dpu_core_perf.c
40 static u64 _dpu_core_perf_calc_bw(struct dpu_kms *kms, in _dpu_core_perf_calc_bw()
72 static u64 _dpu_core_perf_calc_clk(struct dpu_kms *kms, in _dpu_core_perf_calc_clk()
102 static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc) in _dpu_crtc_get_kms()
109 static void _dpu_core_perf_calc_crtc(struct dpu_kms *kms, in _dpu_core_perf_calc_crtc()
149 struct dpu_kms *kms; in dpu_core_perf_crtc_check()
209 static int _dpu_core_perf_crtc_update_bus(struct dpu_kms *kms, in _dpu_core_perf_crtc_update_bus()
260 struct dpu_kms *kms; in dpu_core_perf_crtc_release_bw()
287 static u64 _dpu_core_perf_get_core_clk_rate(struct dpu_kms *kms) in _dpu_core_perf_get_core_clk_rate()
319 struct dpu_kms *kms; in dpu_core_perf_crtc_update()
468 int dpu_core_perf_debugfs_init(struct dpu_kms *dpu_kms, struct dentry *parent) in dpu_core_perf_debugfs_init() argument
[all …]
dpu_encoder_phys_wb.c
57 dpu_vbif_set_ot_limit(phys_enc->dpu_kms, &ot_params); in dpu_encoder_phys_wb_set_ot_limit()
94 dpu_vbif_set_qos_remap(phys_enc->dpu_kms, &qos_params); in dpu_encoder_phys_wb_set_qos_remap()
108 if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) { in dpu_encoder_phys_wb_set_qos()
113 catalog = phys_enc->dpu_kms->catalog; in dpu_encoder_phys_wb_set_qos()
144 if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) { in dpu_encoder_phys_wb_setup_fb()
167 cdp_cfg.enable = phys_enc->dpu_kms->catalog->perf->cdp_cfg in dpu_encoder_phys_wb_setup_fb()
408 dpu_core_irq_register_callback(phys->dpu_kms, in dpu_encoder_phys_wb_irq_ctrl()
412 dpu_core_irq_unregister_callback(phys->dpu_kms, phys->irq[INTR_IDX_WB_DONE]); in dpu_encoder_phys_wb_irq_ctrl()
597 aspace = phys_enc->dpu_kms->base.aspace; in dpu_encoder_phys_wb_prepare_wb_job()
651 aspace = phys_enc->dpu_kms->base.aspace; in dpu_encoder_phys_wb_cleanup_wb_job()
[all …]
dpu_encoder.c
391 irq_status = dpu_core_irq_read(phys_enc->dpu_kms, irq); in dpu_encoder_helper_wait_for_irq()
547 struct dpu_kms *dpu_kms, in dpu_encoder_get_topology() argument
571 else if (!dpu_kms->catalog->caps->has_3d_merge) in dpu_encoder_get_topology()
577 if (dpu_kms->catalog->dspp && in dpu_encoder_get_topology()
578 (dpu_kms->catalog->dspp_count >= topology.num_lm)) in dpu_encoder_get_topology()
607 struct dpu_kms *dpu_kms; in dpu_encoder_virt_atomic_check() local
624 dpu_kms = to_dpu_kms(priv->kms); in dpu_encoder_virt_atomic_check()
646 topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode); in dpu_encoder_virt_atomic_check()
658 ret = dpu_rm_reserve(&dpu_kms->rm, global_state, in dpu_encoder_virt_atomic_check()
673 struct dpu_kms *dpu_kms; in _dpu_encoder_update_vsync_source() local
[all …]
dpu_core_perf.h
125 struct dpu_kms;
132 int dpu_core_perf_debugfs_init(struct dpu_kms *dpu_kms, struct dentry *parent);
dpu_writeback.c
14 struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms); in dpu_wb_conn_get_modes() local
23 return drm_add_modes_noedid(connector, dpu_kms->catalog->caps->max_mixer_width, in dpu_wb_conn_get_modes()
dpu_hw_sspp.h
373 struct dpu_kms;
391 void dpu_debugfs_sspp_init(struct dpu_kms *dpu_kms, struct dentry *debugfs_root);
392 int _dpu_hw_sspp_init_debugfs(struct dpu_hw_pipe *hw_pipe, struct dpu_kms *kms, struct dentry *entr…
dpu_encoder_phys.h
208 struct dpu_kms *dpu_kms; member
284 struct dpu_kms *dpu_kms; member
dpu_plane.c
120 static struct dpu_kms *_dpu_plane_get_kms(struct drm_plane *plane) in _dpu_plane_get_kms()
143 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_calc_bw() local
163 hw_latency_lines = dpu_kms->catalog->perf->min_prefill_lines; in _dpu_plane_calc_bw()
424 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_set_ot_limit() local
437 dpu_vbif_set_ot_limit(dpu_kms, &ot_params); in _dpu_plane_set_ot_limit()
448 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_set_qos_remap() local
463 dpu_vbif_set_qos_remap(dpu_kms, &qos_params); in _dpu_plane_set_qos_remap()
472 struct dpu_kms *kms = _dpu_plane_get_kms(&pdpu->base); in _dpu_plane_set_scanout()
860 struct dpu_kms *kms = _dpu_plane_get_kms(&pdpu->base); in dpu_plane_prepare_fb()
1396 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in dpu_plane_danger_signal_ctrl() local
[all …]
dpu_encoder_phys_vid.c
386 ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_control_vblank_irq()
391 ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_control_vblank_irq()
519 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_prepare_for_kickoff()
609 dpu_core_irq_register_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_irq_control()
615 dpu_core_irq_unregister_callback(phys_enc->dpu_kms, in dpu_encoder_phys_vid_irq_control()
693 phys_enc->hw_mdptop = p->dpu_kms->hw_mdp; in dpu_encoder_phys_vid_init()
701 phys_enc->dpu_kms = p->dpu_kms; in dpu_encoder_phys_vid_init()
dpu_crtc.h
284 int dpu_crtc_register_custom_event(struct dpu_kms *kms,
dpu_crtc.c
46 static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc) in _dpu_crtc_get_kms()
917 struct dpu_kms *dpu_kms = _dpu_crtc_get_kms(crtc); in dpu_crtc_commit_kickoff() local
953 dpu_vbif_clear_errors(dpu_kms); in dpu_crtc_commit_kickoff()
dpu_hw_sspp.c
698 int _dpu_hw_sspp_init_debugfs(struct dpu_hw_pipe *hw_pipe, struct dpu_kms *kms, struct dentry *entr… in _dpu_hw_sspp_init_debugfs()
/linux-6.1.9/drivers/gpu/drm/msm/
Makefile
79 disp/dpu1/dpu_kms.o \