Lines Matching refs:new_dbuf_state

In skl_crtc_allocate_ddb():
    634:  struct intel_dbuf_state *new_dbuf_state =          (local declaration)
    645:  if (new_dbuf_state->weight[pipe] == 0) {
    646:  skl_ddb_entry_init(&new_dbuf_state->ddb[pipe], 0, 0);
    650:  dbuf_slice_mask = new_dbuf_state->slices[pipe];
    656:  intel_crtc_dbuf_weights(new_dbuf_state, pipe,
    662:  skl_ddb_entry_init(&new_dbuf_state->ddb[pipe],
    667:  if (old_dbuf_state->slices[pipe] == new_dbuf_state->slices[pipe] &&
    669:  &new_dbuf_state->ddb[pipe]))
    672:  ret = intel_atomic_lock_global_state(&new_dbuf_state->base);
    684:  crtc_state->wm.skl.ddb.start = mbus_offset + new_dbuf_state->ddb[pipe].start;
    685:  crtc_state->wm.skl.ddb.end = mbus_offset + new_dbuf_state->ddb[pipe].end;
    690:  old_dbuf_state->slices[pipe], new_dbuf_state->slices[pipe],
    692:  new_dbuf_state->ddb[pipe].start, new_dbuf_state->ddb[pipe].end,
    693:  old_dbuf_state->active_pipes, new_dbuf_state->active_pipes);
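
The references above trace one pattern: a pipe with zero weight gets an empty DDB entry, otherwise a weighted DDB range is computed, and the global dbuf state is locked only when the pipe's slice mask or DDB range actually differs from the old state. A minimal, self-contained sketch of that pattern follows; struct dbuf_state, ddb_entry, lock_global_state and the example numbers are hypothetical stand-ins, not the i915 definitions.

/*
 * Sketch (not the i915 code): skip the global dbuf state when nothing
 * changed for this pipe, lock it otherwise.
 */
#include <stdbool.h>
#include <stdio.h>

enum { MAX_PIPES = 4 };

struct ddb_entry { unsigned int start, end; };

struct dbuf_state {
	unsigned int slices[MAX_PIPES];  /* per-pipe dbuf slice mask */
	unsigned int weight[MAX_PIPES];  /* per-pipe allocation weight */
	struct ddb_entry ddb[MAX_PIPES]; /* per-pipe DDB range */
};

static bool ddb_entry_equal(const struct ddb_entry *a, const struct ddb_entry *b)
{
	return a->start == b->start && a->end == b->end;
}

/* Stand-in for intel_atomic_lock_global_state(): here it only reports. */
static int lock_global_state(struct dbuf_state *state)
{
	printf("locking global dbuf state %p\n", (void *)state);
	return 0;
}

static int allocate_ddb_for_pipe(const struct dbuf_state *old,
				 struct dbuf_state *new, int pipe)
{
	/* A pipe with zero weight gets an empty DDB entry. */
	if (new->weight[pipe] == 0) {
		new->ddb[pipe].start = 0;
		new->ddb[pipe].end = 0;
	}

	/* Nothing changed for this pipe: no need to touch global state. */
	if (old->slices[pipe] == new->slices[pipe] &&
	    ddb_entry_equal(&old->ddb[pipe], &new->ddb[pipe]))
		return 0;

	/* Otherwise the global dbuf state must be locked before committing. */
	return lock_global_state(new);
}

int main(void)
{
	struct dbuf_state old = {
		.slices = { 0x1, 0x2 },
		.weight = { 1, 1 },
		.ddb = { { 0, 511 }, { 512, 1023 } },
	};
	struct dbuf_state new = old;

	new.ddb[0].end = 255;	/* shrink pipe 0's allocation */

	allocate_ddb_for_pipe(&old, &new, 0);	/* locks: DDB range changed */
	allocate_ddb_for_pipe(&old, &new, 1);	/* skips: nothing changed */
	return 0;
}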

In skl_compute_ddb():
   2563:  struct intel_dbuf_state *new_dbuf_state = NULL;          (local declaration)
   2570:  new_dbuf_state = intel_atomic_get_dbuf_state(state);
   2571:  if (IS_ERR(new_dbuf_state))
   2572:  return PTR_ERR(new_dbuf_state);
   2578:  if (!new_dbuf_state)
   2581:  new_dbuf_state->active_pipes =
   2584:  if (old_dbuf_state->active_pipes != new_dbuf_state->active_pipes) {
   2585:  ret = intel_atomic_lock_global_state(&new_dbuf_state->base);
   2591:  new_dbuf_state->joined_mbus =
   2592:  adlp_check_mbus_joined(new_dbuf_state->active_pipes);
   2597:  new_dbuf_state->slices[pipe] =
   2598:  skl_compute_dbuf_slices(crtc, new_dbuf_state->active_pipes,
   2599:  new_dbuf_state->joined_mbus);
   2601:  if (old_dbuf_state->slices[pipe] == new_dbuf_state->slices[pipe])
   2604:  ret = intel_atomic_lock_global_state(&new_dbuf_state->base);
   2609:  new_dbuf_state->enabled_slices = intel_dbuf_enabled_slices(new_dbuf_state);
   2611:  if (old_dbuf_state->enabled_slices != new_dbuf_state->enabled_slices ||
   2612:  old_dbuf_state->joined_mbus != new_dbuf_state->joined_mbus) {
   2613:  ret = intel_atomic_serialize_global_state(&new_dbuf_state->base);
   2617:  if (old_dbuf_state->joined_mbus != new_dbuf_state->joined_mbus) {
   2627:  new_dbuf_state->enabled_slices,
   2630:  str_yes_no(new_dbuf_state->joined_mbus));
   2636:  new_dbuf_state->weight[pipe] = intel_crtc_ddb_weight(new_crtc_state);
   2638:  if (old_dbuf_state->weight[pipe] == new_dbuf_state->weight[pipe])
   2641:  ret = intel_atomic_lock_global_state(&new_dbuf_state->base);
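
These references show skl_compute_ddb() recomputing active_pipes, the per-pipe slice masks and weights, deriving enabled_slices, locking the global state when only per-pipe data changes, and using the stricter serialize call when enabled_slices or joined_mbus changes overall. Below is a hedged, self-contained sketch of that lock-versus-serialize decision; compute_dbuf_slices, dbuf_enabled_slices and the slice-assignment rule are invented for the example and are not the driver's.

/*
 * Sketch (stand-in types and rules, not the driver's) of the decision
 * structure in skl_compute_ddb().
 */
#include <stdbool.h>
#include <stdio.h>

enum { MAX_PIPES = 4 };

struct dbuf_state {
	unsigned int active_pipes;      /* bitmask of active pipes */
	bool joined_mbus;               /* joined mbus mode */
	unsigned int slices[MAX_PIPES]; /* per-pipe slice mask */
	unsigned int enabled_slices;    /* union of the per-pipe masks */
};

/* Hypothetical slice assignment: one (or two, when joined) bits per pipe. */
static unsigned int compute_dbuf_slices(int pipe, unsigned int active_pipes,
					bool joined_mbus)
{
	if (!(active_pipes & (1u << pipe)))
		return 0;
	return joined_mbus ? 0x3u << (2 * pipe) : 1u << pipe;
}

static unsigned int dbuf_enabled_slices(const struct dbuf_state *state)
{
	unsigned int mask = 0;

	for (int pipe = 0; pipe < MAX_PIPES; pipe++)
		mask |= state->slices[pipe];
	return mask;
}

int main(void)
{
	struct dbuf_state old = {
		.active_pipes = 0x1,
		.slices = { 0x1 },
		.enabled_slices = 0x1,
	};
	struct dbuf_state new = old;

	new.active_pipes = 0x3;	/* a second pipe turns on */

	for (int pipe = 0; pipe < MAX_PIPES; pipe++) {
		new.slices[pipe] = compute_dbuf_slices(pipe, new.active_pipes,
						       new.joined_mbus);
		/* A per-pipe change only requires locking the global state. */
		if (old.slices[pipe] != new.slices[pipe])
			printf("pipe %d: lock global dbuf state\n", pipe);
	}

	new.enabled_slices = dbuf_enabled_slices(&new);

	/* An overall change requires the stricter serialize step. */
	if (old.enabled_slices != new.enabled_slices ||
	    old.joined_mbus != new.joined_mbus)
		printf("serialize global dbuf state (slices 0x%x -> 0x%x)\n",
		       old.enabled_slices, new.enabled_slices);
	return 0;
}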

In intel_dbuf_pre_plane_update():
   3515:  const struct intel_dbuf_state *new_dbuf_state =          (local declaration)
   3520:  if (!new_dbuf_state ||
   3521:  (new_dbuf_state->enabled_slices == old_dbuf_state->enabled_slices &&
   3522:  new_dbuf_state->joined_mbus == old_dbuf_state->joined_mbus))
   3525:  WARN_ON(!new_dbuf_state->base.changed);
   3530:  new_dbuf_state->enabled_slices);

In intel_dbuf_post_plane_update():
   3536:  const struct intel_dbuf_state *new_dbuf_state =          (local declaration)
   3541:  if (!new_dbuf_state ||
   3542:  (new_dbuf_state->enabled_slices == old_dbuf_state->enabled_slices &&
   3543:  new_dbuf_state->joined_mbus == old_dbuf_state->joined_mbus))
   3546:  WARN_ON(!new_dbuf_state->base.changed);
   3549:  new_dbuf_state->enabled_slices);
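
intel_dbuf_pre_plane_update() and intel_dbuf_post_plane_update() share the same gate: skip the slice update unless a new dbuf state exists and either enabled_slices or joined_mbus differs from the old state, and warn if the state was never marked changed. The sketch below reproduces that gate with stand-in types; update_dbuf_slices and the changed flag handling are hypothetical.

/*
 * Sketch of the gate shared by the pre/post plane-update helpers.
 */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct dbuf_state {
	unsigned int enabled_slices;
	bool joined_mbus;
	bool changed;	/* stand-in for base.changed */
};

static void update_dbuf_slices(unsigned int slices)
{
	printf("programming dbuf slices 0x%x\n", slices);
}

static void dbuf_plane_update(const struct dbuf_state *old_state,
			      const struct dbuf_state *new_state)
{
	if (!new_state ||
	    (new_state->enabled_slices == old_state->enabled_slices &&
	     new_state->joined_mbus == old_state->joined_mbus))
		return;

	/* Mirrors WARN_ON(!new_dbuf_state->base.changed) in the listing. */
	if (!new_state->changed)
		fprintf(stderr, "WARN: dbuf state not marked as changed\n");

	update_dbuf_slices(new_state->enabled_slices);
}

int main(void)
{
	struct dbuf_state old = { .enabled_slices = 0x1 };
	struct dbuf_state new = { .enabled_slices = 0x3, .changed = true };

	dbuf_plane_update(&old, &new);	/* programs the new slice set */
	dbuf_plane_update(&old, &old);	/* unchanged: skipped */
	dbuf_plane_update(&old, NULL);	/* no new state: skipped */
	return 0;
}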

In intel_mbus_dbox_update():
   3574:  const struct intel_dbuf_state *new_dbuf_state, *old_dbuf_state;          (local declaration)
   3583:  new_dbuf_state = intel_atomic_get_new_dbuf_state(state);
   3585:  if (!new_dbuf_state ||
   3586:  (new_dbuf_state->joined_mbus == old_dbuf_state->joined_mbus &&
   3587:  new_dbuf_state->active_pipes == old_dbuf_state->active_pipes))
   3600:  val |= new_dbuf_state->joined_mbus ? MBUS_DBOX_A_CREDIT(12) :
   3604:  val |= new_dbuf_state->joined_mbus ? MBUS_DBOX_A_CREDIT(6) :
   3630:  new_dbuf_state->active_pipes))
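
intel_mbus_dbox_update() is gated the same way, on joined_mbus and active_pipes, and then selects the A-credit count based on whether the mbus is joined (the listing shows 12 and 6 credits on the joined paths; the non-joined values are truncated here). The sketch below illustrates only the gate and the selection; mbus_dbox_a_credit, its encoding, and the non-joined count of 8 are placeholders, not the hardware values.

/*
 * Sketch of the gate and credit selection in intel_mbus_dbox_update().
 */
#include <stdbool.h>
#include <stdio.h>

struct dbuf_state {
	unsigned int active_pipes;
	bool joined_mbus;
};

/* Placeholder for the MBUS_DBOX_A_CREDIT() register field encoding. */
static unsigned int mbus_dbox_a_credit(unsigned int credits)
{
	return credits << 8;
}

static void mbus_dbox_update(const struct dbuf_state *old_state,
			     const struct dbuf_state *new_state)
{
	unsigned int val = 0;

	if (!new_state ||
	    (new_state->joined_mbus == old_state->joined_mbus &&
	     new_state->active_pipes == old_state->active_pipes))
		return;

	/* Joined mbus gets more A credits; the non-joined value is made up. */
	val |= new_state->joined_mbus ? mbus_dbox_a_credit(12) :
					mbus_dbox_a_credit(8);

	printf("writing DBOX value 0x%x for active pipes 0x%x\n",
	       val, new_state->active_pipes);
}

int main(void)
{
	struct dbuf_state old = { .active_pipes = 0x1, .joined_mbus = false };
	struct dbuf_state new = { .active_pipes = 0x3, .joined_mbus = true };

	mbus_dbox_update(&old, &new);	/* reprograms the credits */
	mbus_dbox_update(&old, &old);	/* unchanged: skipped */
	return 0;
}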