Lines matching full:rm. All matches are in the DPU resource manager (drivers/gpu/drm/msm/disp/dpu1/dpu_rm.c); each hit shows the source line number and its enclosing function.

37 int dpu_rm_destroy(struct dpu_rm *rm)  in dpu_rm_destroy()  argument
41 for (i = 0; i < ARRAY_SIZE(rm->dspp_blks); i++) { in dpu_rm_destroy()
44 if (rm->dspp_blks[i]) { in dpu_rm_destroy()
45 hw = to_dpu_hw_dspp(rm->dspp_blks[i]); in dpu_rm_destroy()
49 for (i = 0; i < ARRAY_SIZE(rm->pingpong_blks); i++) { in dpu_rm_destroy()
52 if (rm->pingpong_blks[i]) { in dpu_rm_destroy()
53 hw = to_dpu_hw_pingpong(rm->pingpong_blks[i]); in dpu_rm_destroy()
57 for (i = 0; i < ARRAY_SIZE(rm->merge_3d_blks); i++) { in dpu_rm_destroy()
60 if (rm->merge_3d_blks[i]) { in dpu_rm_destroy()
61 hw = to_dpu_hw_merge_3d(rm->merge_3d_blks[i]); in dpu_rm_destroy()
65 for (i = 0; i < ARRAY_SIZE(rm->mixer_blks); i++) { in dpu_rm_destroy()
68 if (rm->mixer_blks[i]) { in dpu_rm_destroy()
69 hw = to_dpu_hw_mixer(rm->mixer_blks[i]); in dpu_rm_destroy()
73 for (i = 0; i < ARRAY_SIZE(rm->ctl_blks); i++) { in dpu_rm_destroy()
76 if (rm->ctl_blks[i]) { in dpu_rm_destroy()
77 hw = to_dpu_hw_ctl(rm->ctl_blks[i]); in dpu_rm_destroy()
81 for (i = 0; i < ARRAY_SIZE(rm->hw_intf); i++) in dpu_rm_destroy()
82 dpu_hw_intf_destroy(rm->hw_intf[i]); in dpu_rm_destroy()
84 for (i = 0; i < ARRAY_SIZE(rm->dsc_blks); i++) { in dpu_rm_destroy()
87 if (rm->dsc_blks[i]) { in dpu_rm_destroy()
88 hw = to_dpu_hw_dsc(rm->dsc_blks[i]); in dpu_rm_destroy()
93 for (i = 0; i < ARRAY_SIZE(rm->hw_wb); i++) in dpu_rm_destroy()
94 dpu_hw_wb_destroy(rm->hw_wb[i]); in dpu_rm_destroy()
96 for (i = 0; i < ARRAY_SIZE(rm->hw_sspp); i++) in dpu_rm_destroy()
97 dpu_hw_sspp_destroy(rm->hw_sspp[i]); in dpu_rm_destroy()
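
All of the hits above are from the teardown path: dpu_rm_destroy() walks every hardware-block array in struct dpu_rm and frees whatever dpu_rm_init() populated. Two loop shapes are visible in the fragments, and a minimal sketch of each follows. Wrapped blocks (mixer, pingpong, merge_3d, ctl, dspp, dsc) are stored as a generic base pointer, so every non-NULL entry is converted back with the matching to_dpu_hw_*() helper before its destructor runs; plain pointer arrays such as hw_intf, hw_wb and hw_sspp are handed to their destructor unconditionally, which implies the destructor accepts NULL. The mixer destructor name dpu_hw_lm_destroy() is how I recall the driver and is an assumption, not something shown in the listing.

/*
 * Sketch of the two loop shapes in dpu_rm_destroy().  The remaining block
 * types follow the same pattern.  dpu_hw_lm_destroy() is an assumed name
 * for the mixer destructor; the listing only shows the to_dpu_hw_mixer()
 * unwrap.
 */
int dpu_rm_destroy(struct dpu_rm *rm)
{
        int i;

        /* wrapped blocks: unwrap the stored &hw->base before freeing */
        for (i = 0; i < ARRAY_SIZE(rm->mixer_blks); i++) {
                struct dpu_hw_mixer *hw;

                if (rm->mixer_blks[i]) {
                        hw = to_dpu_hw_mixer(rm->mixer_blks[i]);
                        dpu_hw_lm_destroy(hw);
                }
        }

        /* plain pointers: destructor is called even for empty slots */
        for (i = 0; i < ARRAY_SIZE(rm->hw_intf); i++)
                dpu_hw_intf_destroy(rm->hw_intf[i]);

        return 0;
}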
102 int dpu_rm_init(struct dpu_rm *rm, in dpu_rm_init() argument
109 if (!rm || !cat || !mmio) { in dpu_rm_init()
115 memset(rm, 0, sizeof(*rm)); in dpu_rm_init()
128 rm->mixer_blks[lm->id - LM_0] = &hw->base; in dpu_rm_init()
142 rm->merge_3d_blks[merge_3d->id - MERGE_3D_0] = &hw->base; in dpu_rm_init()
157 hw->merge_3d = to_dpu_hw_merge_3d(rm->merge_3d_blks[pp->merge_3d - MERGE_3D_0]); in dpu_rm_init()
158 rm->pingpong_blks[pp->id - PINGPONG_0] = &hw->base; in dpu_rm_init()
171 rm->hw_intf[intf->id - INTF_0] = hw; in dpu_rm_init()
184 rm->hw_wb[wb->id - WB_0] = hw; in dpu_rm_init()
197 rm->ctl_blks[ctl->id - CTL_0] = &hw->base; in dpu_rm_init()
210 rm->dspp_blks[dspp->id - DSPP_0] = &hw->base; in dpu_rm_init()
227 rm->dsc_blks[dsc->id - DSC_0] = &hw->base; in dpu_rm_init()
240 rm->hw_sspp[sspp->id - SSPP_NONE] = hw; in dpu_rm_init()
246 dpu_rm_destroy(rm); in dpu_rm_init()
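
dpu_rm_init() is the mirror image: after the argument check at line 109 and the memset at line 115 it walks the hardware catalog, constructs a dpu_hw_* object for every block, and files it into the matching array indexed by the block id minus the first id of that type (LM_0, MERGE_3D_0, PINGPONG_0, and so on); any constructor failure falls through to the dpu_rm_destroy() call at line 246. A reduced sketch with just the mixer loop follows; the dpu_hw_lm_init() signature and the cat->mixer/mixer_count catalog fields are written from memory of the driver and vary between kernel versions, so treat them as assumptions.

/*
 * Reduced sketch of dpu_rm_init(): one catalog loop plus the shared error
 * path.  The dpu_hw_lm_init() signature shown here is assumed; some kernel
 * versions pass the block id and the whole catalog instead of a single
 * dpu_lm_cfg entry, and newer ones give dpu_rm_init() extra parameters.
 */
int dpu_rm_init(struct dpu_rm *rm,
                const struct dpu_mdss_cfg *cat,
                void __iomem *mmio)
{
        int rc, i;

        if (!rm || !cat || !mmio)
                return -EINVAL;

        memset(rm, 0, sizeof(*rm));

        for (i = 0; i < cat->mixer_count; i++) {
                struct dpu_hw_mixer *hw;
                const struct dpu_lm_cfg *lm = &cat->mixer[i];

                hw = dpu_hw_lm_init(lm, mmio);  /* assumed constructor */
                if (IS_ERR(hw)) {
                        rc = PTR_ERR(hw);
                        goto fail;
                }
                /* blocks are indexed by hardware id, offset from LM_0 */
                rm->mixer_blks[lm->id - LM_0] = &hw->base;
        }

        /* pingpong, merge_3d, intf, wb, ctl, dspp, dsc and sspp loops
         * follow the same pattern */

        return 0;

fail:
        dpu_rm_destroy(rm);
        return rc;
}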
258 * @rm: dpu resource manager handle
259 * @primary_idx: index of primary mixer in rm->mixer_blks[]
261 static int _dpu_rm_get_lm_peer(struct dpu_rm *rm, int primary_idx) in _dpu_rm_get_lm_peer() argument
265 prim_lm_cfg = to_dpu_hw_mixer(rm->mixer_blks[primary_idx])->cap; in _dpu_rm_get_lm_peer()
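
_dpu_rm_get_lm_peer() fetches the catalog entry of the primary mixer (line 265) and returns the rm->mixer_blks[] index of the mixer the catalog pairs with it, or a negative error when there is no usable peer. The sketch below assumes the pairing is exposed as a single lm_pair id in struct dpu_lm_cfg; older kernels use an lm_pair_mask bitmap instead, so adjust accordingly.

/*
 * Sketch of the peer lookup.  The lm_pair field and the LM_MAX bound are
 * assumptions about the catalog layout.
 */
static int _dpu_rm_get_lm_peer(struct dpu_rm *rm, int primary_idx)
{
        const struct dpu_lm_cfg *prim_lm_cfg;

        prim_lm_cfg = to_dpu_hw_mixer(rm->mixer_blks[primary_idx])->cap;

        if (prim_lm_cfg->lm_pair >= LM_0 && prim_lm_cfg->lm_pair < LM_MAX)
                return prim_lm_cfg->lm_pair - LM_0;     /* id -> array index */

        return -EINVAL;
}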
276 * @rm: dpu resource manager handle
279 * @lm_idx: index of proposed layer mixer in rm->mixer_blks[], function checks
283 * mixer in rm->pingpong_blks[].
285 * mixer in rm->dspp_blks[].
286 * @reqs: input parameter, rm requirements for HW blocks needed in the
290 static bool _dpu_rm_check_lm_and_get_connected_blks(struct dpu_rm *rm, in _dpu_rm_check_lm_and_get_connected_blks() argument
304 lm_cfg = to_dpu_hw_mixer(rm->mixer_blks[lm_idx])->cap; in _dpu_rm_check_lm_and_get_connected_blks()
306 if (idx < 0 || idx >= ARRAY_SIZE(rm->pingpong_blks)) { in _dpu_rm_check_lm_and_get_connected_blks()
322 if (idx < 0 || idx >= ARRAY_SIZE(rm->dspp_blks)) { in _dpu_rm_check_lm_and_get_connected_blks()
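
The kerneldoc fragments describe _dpu_rm_check_lm_and_get_connected_blks(): for a candidate mixer it loads the catalog entry (line 304), derives the index of the pingpong, and optionally the DSPP, wired to that mixer, and rejects the candidate when the index is out of range (the checks at lines 306 and 322), the block is absent, or it is already reserved for another encoder. A sketch of the pingpong half of the check follows; reserved_by_other() and global_state->pingpong_to_enc_id are names recalled from the driver's reservation bookkeeping and should be treated as assumptions.

/*
 * Sketch of the pingpong check inside
 * _dpu_rm_check_lm_and_get_connected_blks().  reserved_by_other() and
 * pingpong_to_enc_id are assumed names.
 */
lm_cfg = to_dpu_hw_mixer(rm->mixer_blks[lm_idx])->cap;

idx = lm_cfg->pingpong - PINGPONG_0;
if (idx < 0 || idx >= ARRAY_SIZE(rm->pingpong_blks))
        return false;   /* catalog points at a pingpong the RM does not manage */

if (reserved_by_other(global_state->pingpong_to_enc_id, idx, enc_id))
        return false;   /* already claimed by a different encoder */

*pp_idx = idx;          /* hand the index back to _dpu_rm_reserve_lms() */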
337 static int _dpu_rm_reserve_lms(struct dpu_rm *rm, in _dpu_rm_reserve_lms() argument
354 for (i = 0; i < ARRAY_SIZE(rm->mixer_blks) && in _dpu_rm_reserve_lms()
356 if (!rm->mixer_blks[i]) in _dpu_rm_reserve_lms()
362 if (!_dpu_rm_check_lm_and_get_connected_blks(rm, global_state, in _dpu_rm_reserve_lms()
372 int j = _dpu_rm_get_lm_peer(rm, i); in _dpu_rm_reserve_lms()
378 if (!rm->mixer_blks[j]) in _dpu_rm_reserve_lms()
381 if (!_dpu_rm_check_lm_and_get_connected_blks(rm, in _dpu_rm_reserve_lms()
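
_dpu_rm_reserve_lms() drives that check: it scans rm->mixer_blks[] (line 354), skips empty slots (line 356), and tries each populated mixer as the primary of the set (line 362); when the topology wants a second mixer it asks _dpu_rm_get_lm_peer() for the partner (line 372) and validates that one as well (lines 378 and 381) before writing the whole set into global_state. A condensed sketch of the search loop, with the final bookkeeping omitted; the local names lm_count, lm_idx[], pp_idx[] and dspp_idx[] are recalled from the driver and are illustrative.

/*
 * Condensed sketch of the primary/peer search in _dpu_rm_reserve_lms().
 * Local variable names are illustrative.
 */
for (i = 0; i < ARRAY_SIZE(rm->mixer_blks) &&
                lm_count < reqs->topology.num_lm; i++) {
        if (!rm->mixer_blks[i])
                continue;

        lm_count = 0;
        lm_idx[lm_count] = i;

        if (!_dpu_rm_check_lm_and_get_connected_blks(rm, global_state,
                        enc_id, i, &pp_idx[lm_count],
                        &dspp_idx[lm_count], reqs))
                continue;

        ++lm_count;

        /* a valid primary was found; pair it if the topology needs two */
        if (lm_count < reqs->topology.num_lm) {
                int j = _dpu_rm_get_lm_peer(rm, i);

                if (j < 0 || !rm->mixer_blks[j])
                        continue;

                if (!_dpu_rm_check_lm_and_get_connected_blks(rm,
                                global_state, enc_id, j,
                                &pp_idx[lm_count], &dspp_idx[lm_count],
                                reqs))
                        continue;

                lm_idx[lm_count] = j;
                ++lm_count;
        }
}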
412 struct dpu_rm *rm, in _dpu_rm_reserve_ctls() argument
426 for (j = 0; j < ARRAY_SIZE(rm->ctl_blks); j++) { in _dpu_rm_reserve_ctls()
431 if (!rm->ctl_blks[j]) in _dpu_rm_reserve_ctls()
436 ctl = to_dpu_hw_ctl(rm->ctl_blks[j]); in _dpu_rm_reserve_ctls()
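
_dpu_rm_reserve_ctls() picks the control paths: it walks rm->ctl_blks[] (line 426), skips unpopulated or already-reserved slots (line 431), unwraps each candidate to inspect its capabilities (line 436), and keeps collecting CTLs whose features match the request until it has enough. A compressed sketch; the split-display feature test, reserved_by_other() and the ctl_idx[]/num_ctls/needs_split_display locals reflect the driver as I recall it and are not shown in the listing.

/*
 * Compressed sketch of the CTL selection loop.  The DPU_CTL_SPLIT_DISPLAY
 * test, reserved_by_other() and the surrounding locals are recalled from
 * the driver, not taken from the listing.
 */
for (j = 0; j < ARRAY_SIZE(rm->ctl_blks); j++) {
        const struct dpu_hw_ctl *ctl;
        unsigned long features;
        bool has_split_display;

        if (!rm->ctl_blks[j])
                continue;
        if (reserved_by_other(global_state->ctl_to_enc_id, j, enc_id))
                continue;

        ctl = to_dpu_hw_ctl(rm->ctl_blks[j]);
        features = ctl->caps->features;
        has_split_display = BIT(DPU_CTL_SPLIT_DISPLAY) & features;

        if (needs_split_display != has_split_display)
                continue;       /* wrong kind of CTL for this topology */

        ctl_idx[i] = j;
        if (++i == num_ctls)
                break;
}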
464 static int _dpu_rm_reserve_dsc(struct dpu_rm *rm, in _dpu_rm_reserve_dsc() argument
474 if (!rm->dsc_blks[i]) { in _dpu_rm_reserve_dsc()
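
_dpu_rm_reserve_dsc() only matters when the topology requests DSC: it requires the first num_dsc entries of rm->dsc_blks[] to exist (the NULL check at line 474) and to be free, then records the encoder as their owner. A sketch along the lines of the simpler, pre-pair-allocation version of the driver:

/*
 * Sketch of _dpu_rm_reserve_dsc() as in the simpler driver versions;
 * newer kernels allocate DSC blocks in pairs, which is not shown here.
 */
static int _dpu_rm_reserve_dsc(struct dpu_rm *rm,
                               struct dpu_global_state *global_state,
                               struct drm_encoder *enc,
                               const struct msm_display_topology *top)
{
        int num_dsc = top->num_dsc;
        int i;

        for (i = 0; i < num_dsc; i++) {
                if (!rm->dsc_blks[i])
                        return -EIO;            /* hardware block missing */
                if (global_state->dsc_to_enc_id[i])
                        return -EIO;            /* already allocated */
        }

        for (i = 0; i < num_dsc; i++)
                global_state->dsc_to_enc_id[i] = enc->base.id;

        return 0;
}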
492 struct dpu_rm *rm, in _dpu_rm_make_reservation() argument
499 ret = _dpu_rm_reserve_lms(rm, global_state, enc->base.id, reqs); in _dpu_rm_make_reservation()
505 ret = _dpu_rm_reserve_ctls(rm, global_state, enc->base.id, in _dpu_rm_make_reservation()
512 ret = _dpu_rm_reserve_dsc(rm, global_state, enc, &reqs->topology); in _dpu_rm_make_reservation()
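
_dpu_rm_make_reservation() simply strings those helpers together in a fixed order: layer mixers first (line 499), then CTLs (line 505), then DSC (line 512), bailing out on the first failure so the caller can discard the partial assignment. Sketch, with log messages omitted:

/* Sketch of the reservation sequence in _dpu_rm_make_reservation(). */
static int _dpu_rm_make_reservation(struct dpu_rm *rm,
                struct dpu_global_state *global_state,
                struct drm_encoder *enc,
                struct dpu_rm_requirements *reqs)
{
        int ret;

        ret = _dpu_rm_reserve_lms(rm, global_state, enc->base.id, reqs);
        if (ret)
                return ret;

        ret = _dpu_rm_reserve_ctls(rm, global_state, enc->base.id,
                                   &reqs->topology);
        if (ret)
                return ret;

        return _dpu_rm_reserve_dsc(rm, global_state, enc, &reqs->topology);
}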
560 struct dpu_rm *rm, in dpu_rm_reserve() argument
587 ret = _dpu_rm_make_reservation(rm, global_state, enc, &reqs); in dpu_rm_reserve()
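
dpu_rm_reserve() is the public entry point called from the encoder's atomic path: it translates the requested display topology into a dpu_rm_requirements struct and then calls _dpu_rm_make_reservation() (line 587). The sketch below fills in the surrounding steps from memory of the driver; the early page-flip bail-out, _dpu_rm_populate_requirements() and the exact parameter list are assumptions.

/*
 * Sketch of the public entry point.  _dpu_rm_populate_requirements() and
 * the parameter list are assumed; only the _dpu_rm_make_reservation()
 * call is taken from the listing.
 */
int dpu_rm_reserve(struct dpu_rm *rm,
                   struct dpu_global_state *global_state,
                   struct drm_encoder *enc,
                   struct drm_crtc_state *crtc_state,
                   struct msm_display_topology topology)
{
        struct dpu_rm_requirements reqs;
        int ret;

        /* nothing to reserve for a plain page flip */
        if (!drm_atomic_crtc_needs_modeset(crtc_state))
                return 0;

        ret = _dpu_rm_populate_requirements(enc, &reqs, topology);  /* assumed helper */
        if (ret)
                return ret;

        return _dpu_rm_make_reservation(rm, global_state, enc, &reqs);
}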
596 int dpu_rm_get_assigned_resources(struct dpu_rm *rm, in dpu_rm_get_assigned_resources() argument
606 hw_blks = rm->pingpong_blks; in dpu_rm_get_assigned_resources()
608 max_blks = ARRAY_SIZE(rm->pingpong_blks); in dpu_rm_get_assigned_resources()
611 hw_blks = rm->mixer_blks; in dpu_rm_get_assigned_resources()
613 max_blks = ARRAY_SIZE(rm->mixer_blks); in dpu_rm_get_assigned_resources()
616 hw_blks = rm->ctl_blks; in dpu_rm_get_assigned_resources()
618 max_blks = ARRAY_SIZE(rm->ctl_blks); in dpu_rm_get_assigned_resources()
621 hw_blks = rm->dspp_blks; in dpu_rm_get_assigned_resources()
623 max_blks = ARRAY_SIZE(rm->dspp_blks); in dpu_rm_get_assigned_resources()
626 hw_blks = rm->dsc_blks; in dpu_rm_get_assigned_resources()
628 max_blks = ARRAY_SIZE(rm->dsc_blks); in dpu_rm_get_assigned_resources()
631 DPU_ERROR("blk type %d not managed by rm\n", type); in dpu_rm_get_assigned_resources()
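
dpu_rm_get_assigned_resources() is how the rest of the driver retrieves what was reserved: the switch maps a block type onto the matching rm array, the per-encoder ownership array in global_state, and the array bound (lines 606 to 628), and an unmanaged type hits the error at line 631; after the switch, every block owned by enc_id is copied into the caller's array. A sketch with two of the switch arms; the full parameter list, the *_to_enc_id arrays and the DPU_HW_BLK_* enumerators are recalled from the driver rather than shown in the listing.

/*
 * Sketch of dpu_rm_get_assigned_resources() with only two switch arms.
 * The *_to_enc_id arrays, DPU_HW_BLK_* enumerators and the parameter list
 * are recalled from the driver rather than taken from the listing.
 */
int dpu_rm_get_assigned_resources(struct dpu_rm *rm,
        struct dpu_global_state *global_state, uint32_t enc_id,
        enum dpu_hw_blk_type type, struct dpu_hw_blk **blks, int blks_size)
{
        struct dpu_hw_blk **hw_blks;
        uint32_t *hw_to_enc_id;
        int i, num_blks, max_blks;

        switch (type) {
        case DPU_HW_BLK_PINGPONG:
                hw_blks = rm->pingpong_blks;
                hw_to_enc_id = global_state->pingpong_to_enc_id;
                max_blks = ARRAY_SIZE(rm->pingpong_blks);
                break;
        case DPU_HW_BLK_CTL:
                hw_blks = rm->ctl_blks;
                hw_to_enc_id = global_state->ctl_to_enc_id;
                max_blks = ARRAY_SIZE(rm->ctl_blks);
                break;
        /* LM, DSPP and DSC arms follow the same shape */
        default:
                DPU_ERROR("blk type %d not managed by rm\n", type);
                return 0;
        }

        num_blks = 0;
        for (i = 0; i < max_blks; i++) {
                if (hw_to_enc_id[i] != enc_id)
                        continue;
                if (num_blks == blks_size)
                        break;                  /* caller's array is full */
                blks[num_blks++] = hw_blks[i];
        }

        return num_blks;
}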