/linux-5.19.10/drivers/gpu/drm/amd/display/dc/dml/dcn301/ |
D | dcn301_fpu.c |
    219  struct dcn_watermarks *wm_set,  in calculate_wm_set_for_vlevel() argument
    236  wm_set->urgent_ns = get_wm_urgent(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    237  …wm_set->cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(dml, pipes, pipe_cnt) …  in calculate_wm_set_for_vlevel()
    238  wm_set->cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    239  wm_set->cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    240  wm_set->pte_meta_urgent_ns = get_wm_memory_trip(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    241  wm_set->frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    242  wm_set->frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    243  wm_set->urgent_latency_ns = get_urgent_latency(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
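The calculate_wm_set_for_vlevel() references here (and the identical helper in dcn20_fpu.c further down) show DML latency results, reported in microseconds, being scaled by 1000 into the nanosecond fields of struct dcn_watermarks. Below is a minimal compile-and-run sketch of that conversion pattern; the struct is trimmed to a few of the fields visible above, and the get_*_us() helpers are hypothetical stand-ins for the real get_wm_*() DML queries, which take the display_mode_lib, pipe array, and pipe count as arguments.

/*
 * Sketch only: trimmed stand-ins for struct dcn_watermarks and the DML
 * get_wm_*() helpers, illustrating the microsecond-to-nanosecond
 * scaling performed by calculate_wm_set_for_vlevel().
 */
#include <stdint.h>
#include <stdio.h>

struct cstate_pstate_wm_sketch {
	uint32_t cstate_enter_plus_exit_ns;
	uint32_t cstate_exit_ns;
	uint32_t pstate_change_ns;
};

struct wm_set_sketch {			/* subset of struct dcn_watermarks */
	uint32_t urgent_ns;
	struct cstate_pstate_wm_sketch cstate_pstate;
	uint32_t pte_meta_urgent_ns;
	uint32_t urgent_latency_ns;
};

/* Hypothetical DML queries; the real ones take (dml, pipes, pipe_cnt)
 * and return latencies in microseconds. */
static double get_wm_urgent_us(void)       { return 4.0; }
static double get_wm_stutter_exit_us(void) { return 8.5; }
static double get_urgent_latency_us(void)  { return 4.0; }

static void fill_wm_set(struct wm_set_sketch *wm_set)
{
	/* DML reports microseconds; the watermark struct stores
	 * nanoseconds, hence the consistent "* 1000". */
	wm_set->urgent_ns                    = get_wm_urgent_us() * 1000;
	wm_set->cstate_pstate.cstate_exit_ns = get_wm_stutter_exit_us() * 1000;
	wm_set->urgent_latency_ns            = get_urgent_latency_us() * 1000;
}

int main(void)
{
	struct wm_set_sketch wm = { 0 };

	fill_wm_set(&wm);
	printf("urgent_ns=%u cstate_exit_ns=%u\n",
	       (unsigned)wm.urgent_ns, (unsigned)wm.cstate_pstate.cstate_exit_ns);
	return 0;
}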
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/ |
D | dcn316_clk_mgr.c |
    413  struct dcn316_watermarks *table = clk_mgr_dcn316->smu_wm_set.wm_set;  in dcn316_notify_wm_ranges()
    633  clk_mgr->smu_wm_set.wm_set = (struct dcn316_watermarks *)dm_helpers_allocate_gpu_mem(  in dcn316_clk_mgr_construct()
    639  if (!clk_mgr->smu_wm_set.wm_set) {  in dcn316_clk_mgr_construct()
    640  clk_mgr->smu_wm_set.wm_set = &dummy_wms;  in dcn316_clk_mgr_construct()
    643  ASSERT(clk_mgr->smu_wm_set.wm_set);  in dcn316_clk_mgr_construct()
    716  if (clk_mgr->smu_wm_set.wm_set && clk_mgr->smu_wm_set.mc_address.quad_part != 0)  in dcn316_clk_mgr_destroy()
    718  clk_mgr->smu_wm_set.wm_set);  in dcn316_clk_mgr_destroy()
|
D | dcn316_clk_mgr.h | 33 struct dcn316_watermarks *wm_set; member
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/ |
D | vg_clk_mgr.c |
    445  struct watermarks *table = clk_mgr_vgh->smu_wm_set.wm_set;  in vg_notify_wm_ranges()
    737  clk_mgr->smu_wm_set.wm_set = (struct watermarks *)dm_helpers_allocate_gpu_mem(  in vg_clk_mgr_construct()
    743  if (!clk_mgr->smu_wm_set.wm_set) {  in vg_clk_mgr_construct()
    744  clk_mgr->smu_wm_set.wm_set = &dummy_wms;  in vg_clk_mgr_construct()
    747  ASSERT(clk_mgr->smu_wm_set.wm_set);  in vg_clk_mgr_construct()
    817  if (clk_mgr->smu_wm_set.wm_set && clk_mgr->smu_wm_set.mc_address.quad_part != 0)  in vg_clk_mgr_destroy()
    819  clk_mgr->smu_wm_set.wm_set);  in vg_clk_mgr_destroy()
|
D | vg_clk_mgr.h | 33 struct watermarks *wm_set; member
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/ |
D | dcn31_clk_mgr.c |
    472  struct dcn31_watermarks *table = clk_mgr_dcn31->smu_wm_set.wm_set;  in dcn31_notify_wm_ranges()
    682  clk_mgr->smu_wm_set.wm_set = (struct dcn31_watermarks *)dm_helpers_allocate_gpu_mem(  in dcn31_clk_mgr_construct()
    688  if (!clk_mgr->smu_wm_set.wm_set) {  in dcn31_clk_mgr_construct()
    689  clk_mgr->smu_wm_set.wm_set = &dummy_wms;  in dcn31_clk_mgr_construct()
    692  ASSERT(clk_mgr->smu_wm_set.wm_set);  in dcn31_clk_mgr_construct()
    758  if (clk_mgr->smu_wm_set.wm_set && clk_mgr->smu_wm_set.mc_address.quad_part != 0)  in dcn31_clk_mgr_destroy()
    760  clk_mgr->smu_wm_set.wm_set);  in dcn31_clk_mgr_destroy()
|
D | dcn31_clk_mgr.h | 33 struct dcn31_watermarks *wm_set; member
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/ |
D | dcn315_clk_mgr.c |
    407  struct dcn315_watermarks *table = clk_mgr_dcn315->smu_wm_set.wm_set;  in dcn315_notify_wm_ranges()
    611  clk_mgr->smu_wm_set.wm_set = (struct dcn315_watermarks *)dm_helpers_allocate_gpu_mem(  in dcn315_clk_mgr_construct()
    617  if (!clk_mgr->smu_wm_set.wm_set) {  in dcn315_clk_mgr_construct()
    618  clk_mgr->smu_wm_set.wm_set = &dummy_wms;  in dcn315_clk_mgr_construct()
    621  ASSERT(clk_mgr->smu_wm_set.wm_set);  in dcn315_clk_mgr_construct()
    684  if (clk_mgr->smu_wm_set.wm_set && clk_mgr->smu_wm_set.mc_address.quad_part != 0)  in dcn315_clk_mgr_destroy()
    686  clk_mgr->smu_wm_set.wm_set);  in dcn315_clk_mgr_destroy()
|
D | dcn315_clk_mgr.h | 33 struct dcn315_watermarks *wm_set; member
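All four clock managers above (dcn316, vg/dcn301, dcn31, dcn315) follow the same lifecycle for smu_wm_set.wm_set: allocate a GPU-visible watermark table through dm_helpers_allocate_gpu_mem(), fall back to a static dummy_wms if the allocation fails so the table can still be built on the CPU side, and on destroy free the buffer only when mc_address is non-zero, i.e. a real allocation rather than the dummy. The sketch below models that pattern; alloc_gpu_mem(), free_gpu_mem(), and the trimmed struct layouts are hypothetical stand-ins, not the real dm_helpers API.

/* Sketch of the allocate-or-fall-back-to-dummy pattern shared by the
 * dcnXXX_clk_mgr_construct()/_destroy() pairs above. Helper names and
 * struct layouts are hypothetical stand-ins. */
#include <stdint.h>
#include <stdlib.h>

struct wm_table { uint32_t ranges[8]; };	/* stand-in for dcnXX_watermarks */

struct smu_wm_set_sketch {
	struct wm_table *wm_set;	/* CPU pointer used to fill the table */
	uint64_t mc_address;		/* GPU address handed to the SMU; 0 = dummy */
};

static struct wm_table dummy_wms;		/* static fallback, never freed */

/* Hypothetical GPU-memory helpers modelling dm_helpers_allocate_gpu_mem()
 * and its free counterpart. */
static struct wm_table *alloc_gpu_mem(size_t size, uint64_t *mc_address)
{
	struct wm_table *t = malloc(size);

	*mc_address = t ? (uint64_t)(uintptr_t)t : 0;	/* fake GPU address */
	return t;
}

static void free_gpu_mem(struct wm_table *t)
{
	free(t);
}

static void clk_mgr_construct(struct smu_wm_set_sketch *s)
{
	s->wm_set = alloc_gpu_mem(sizeof(*s->wm_set), &s->mc_address);
	if (!s->wm_set) {
		/* Keep a valid pointer so notify_wm_ranges() can still
		 * build the table even though the SMU never sees it. */
		s->wm_set = &dummy_wms;
		s->mc_address = 0;
	}
}

static void clk_mgr_destroy(struct smu_wm_set_sketch *s)
{
	/* Free only a real GPU allocation; mc_address == 0 means the
	 * static dummy table is in use. */
	if (s->wm_set && s->mc_address != 0)
		free_gpu_mem(s->wm_set);
	s->wm_set = NULL;
}

int main(void)
{
	struct smu_wm_set_sketch s = { 0 };

	clk_mgr_construct(&s);
	clk_mgr_destroy(&s);
	return 0;
}

Falling back to &dummy_wms rather than NULL lets the rest of the construct path run unconditionally; the mc_address check on teardown is what distinguishes the dummy from a real allocation.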
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/dcn20/ |
D | dcn20_hubbub.c |
    495  s->wm_set = 0;  in hubbub2_wm_read_state()
    506  s->wm_set = 1;  in hubbub2_wm_read_state()
    517  s->wm_set = 2;  in hubbub2_wm_read_state()
    528  s->wm_set = 3;  in hubbub2_wm_read_state()
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/inc/hw/ |
D | dchubbub.h | 44 uint32_t wm_set; member
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/dcn10/ |
D | dcn10_hubbub.c |
    53  s->wm_set = 0;  in hubbub1_wm_read_state()
    63  s->wm_set = 1;  in hubbub1_wm_read_state()
    73  s->wm_set = 2;  in hubbub1_wm_read_state()
    83  s->wm_set = 3;  in hubbub1_wm_read_state()
|
D | dcn10_hw_sequencer_debug.c | 96 s->wm_set, in dcn10_get_hubbub_state()
|
D | dcn10_hw_sequencer.c | 153 DTN_INFO("WM_Set[%d]:", s->wm_set); in dcn10_log_hubbub_state()
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/dcn21/ |
D | dcn21_hubbub.c |
    633  s->wm_set = 0;  in hubbub21_wm_read_state()
    647  s->wm_set = 1;  in hubbub21_wm_read_state()
    661  s->wm_set = 2;  in hubbub21_wm_read_state()
    675  s->wm_set = 3;  in hubbub21_wm_read_state()
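The hubbub1/hubbub2/hubbub21 _wm_read_state() entries above, together with the wm_set member in dchubbub.h and the DTN_INFO("WM_Set[%d]:", ...) logging in dcn10_hw_sequencer.c, show the debug read-back path: the hardware exposes four watermark sets, each snapshot is tagged with its set index (0-3) in wm_set, and the log loop prints one line per set. The sketch below mirrors that flow with a trimmed state struct and a hypothetical read_wm_reg() in place of the real register reads; it is not the dchubbub.h layout.

/* Sketch of the read-and-tag flow used by hubbubN_wm_read_state() and
 * the "WM_Set[%d]:" debug output. The struct and read_wm_reg() are
 * simplified stand-ins for the real state struct and register access. */
#include <stdint.h>
#include <stdio.h>

#define NUM_WM_SETS 4

struct hubbub_wm_state_sketch {
	uint32_t wm_set;	/* which of the four watermark sets this is */
	uint32_t data_urgent;	/* example per-set register value */
};

/* Hypothetical register read returning a per-set watermark value. */
static uint32_t read_wm_reg(unsigned int set)
{
	return 0x50 + 0x10 * set;
}

static void wm_read_state(struct hubbub_wm_state_sketch s[NUM_WM_SETS])
{
	for (unsigned int i = 0; i < NUM_WM_SETS; i++) {
		s[i].wm_set = i;		/* mirrors s->wm_set = 0..3 */
		s[i].data_urgent = read_wm_reg(i);
	}
}

int main(void)
{
	struct hubbub_wm_state_sketch s[NUM_WM_SETS];

	wm_read_state(s);
	for (unsigned int i = 0; i < NUM_WM_SETS; i++)
		printf("WM_Set[%u]: data_urgent=0x%x\n",
		       (unsigned)s[i].wm_set, (unsigned)s[i].data_urgent);
	return 0;
}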
|
/linux-5.19.10/drivers/gpu/drm/amd/display/dc/dml/dcn20/ |
D | dcn20_fpu.c |
    1780  struct dcn_watermarks *wm_set,  in calculate_wm_set_for_vlevel() argument
    1797  wm_set->urgent_ns = get_wm_urgent(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    1798  …wm_set->cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(dml, pipes, pipe_cnt) …  in calculate_wm_set_for_vlevel()
    1799  wm_set->cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    1800  wm_set->cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    1801  wm_set->pte_meta_urgent_ns = get_wm_memory_trip(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    1802  wm_set->frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    1803  wm_set->frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
    1804  wm_set->urgent_latency_ns = get_urgent_latency(dml, pipes, pipe_cnt) * 1000;  in calculate_wm_set_for_vlevel()
|