Lines matching refs: mst_state (all hits shown are in drm_dp_mst_topology.c)

3706 int drm_dp_mst_topology_mgr_set_mst(struct drm_dp_mst_topology_mgr *mgr, bool mst_state)  in drm_dp_mst_topology_mgr_set_mst()  argument
3713 if (mst_state == mgr->mst_state) in drm_dp_mst_topology_mgr_set_mst()
3716 mgr->mst_state = mst_state; in drm_dp_mst_topology_mgr_set_mst()
3718 if (mst_state) { in drm_dp_mst_topology_mgr_set_mst()
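
The four hits above are from drm_dp_mst_topology_mgr_set_mst(), the entry point a driver uses to switch a topology manager in or out of MST mode; the compare at 3713 makes the call a no-op when the requested mode already matches mgr->mst_state. A minimal driver-side sketch follows, assuming a hypothetical example_dp wrapper struct; the drm_dp_mst_topology_mgr_set_mst() signature itself is taken from the hit at 3706.

#include <linux/list.h>
#include <linux/slab.h>
#include <drm/drm_device.h>
#include <drm/drm_print.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_dp_mst_helper.h>	/* drm/display/... on newer kernels */

/* Hypothetical per-connector driver state used by the sketches below. */
struct example_dp {
	struct drm_device *drm;
	struct drm_dp_aux aux;
	struct drm_dp_mst_topology_mgr mst_mgr;
};

static void example_dp_update_mst(struct example_dp *dp, bool is_mst)
{
	int ret;

	/* Idempotent: returns early if is_mst already equals mgr->mst_state. */
	ret = drm_dp_mst_topology_mgr_set_mst(&dp->mst_mgr, is_mst);
	if (ret)
		drm_err(dp->drm, "failed to %s MST mode: %d\n",
			is_mst ? "enable" : "disable", ret);
}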
3827 if (mgr->mst_state && mgr->mst_primary) in drm_dp_mst_topology_mgr_suspend()
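
The single hit at 3827 is the guard in drm_dp_mst_topology_mgr_suspend(): the manager is only quiesced when both mgr->mst_state and mgr->mst_primary are set, so the call is safe even when MST was never enabled. A hedged sketch of the driver-side suspend hook, reusing the example_dp struct from the previous sketch:

static int example_dp_suspend(struct example_dp *dp)
{
	/* No-op unless the manager is active with a primary branch (3827). */
	drm_dp_mst_topology_mgr_suspend(&dp->mst_mgr);
	return 0;
}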
5095 struct drm_dp_mst_topology_state *mst_state = in drm_dp_mst_destroy_state() local
5099 list_for_each_entry_safe(pos, tmp, &mst_state->vcpis, next) { in drm_dp_mst_destroy_state()
5106 kfree(mst_state); in drm_dp_mst_destroy_state()
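
drm_dp_mst_destroy_state() is the topology manager's .atomic_destroy_state hook for the DRM private-object API: it walks the mst_state->vcpis list with list_for_each_entry_safe() and then frees the state itself (5099-5106). The sketch below mirrors that shape; the container_of() cast follows from the base member seen at 5541, while the drm_dp_mst_put_port_malloc() step for active allocations is an assumption about what the elided loop body does.

static void example_mst_destroy_state(struct drm_private_obj *obj,
				      struct drm_private_state *state)
{
	struct drm_dp_mst_topology_state *mst_state =
		container_of(state, struct drm_dp_mst_topology_state, base);
	struct drm_dp_vcpi_allocation *pos, *tmp;

	list_for_each_entry_safe(pos, tmp, &mst_state->vcpis, next) {
		/* Assumed: ports with an active VCPI hold a malloc reference. */
		if (pos->vcpi)
			drm_dp_mst_put_port_malloc(pos->port);
		kfree(pos);
	}

	kfree(mst_state);
}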
5228 struct drm_dp_mst_topology_state *mst_state) in drm_dp_mst_atomic_check_vcpi_alloc_limit() argument
5233 list_for_each_entry(vcpi, &mst_state->vcpis, next) { in drm_dp_mst_atomic_check_vcpi_alloc_limit()
5248 vcpi->port, mst_state, avail_slots + vcpi->vcpi); in drm_dp_mst_atomic_check_vcpi_alloc_limit()
5255 mgr, mst_state, mgr->max_payloads); in drm_dp_mst_atomic_check_vcpi_alloc_limit()
5260 mgr, mst_state, avail_slots, 63 - avail_slots); in drm_dp_mst_atomic_check_vcpi_alloc_limit()
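
drm_dp_mst_atomic_check_vcpi_alloc_limit() is the slot-budget check run for each manager during atomic check: an MST link offers 63 usable time slots, so the allocations in mst_state->vcpis are summed and the state is rejected when they exceed that budget (5260) or when there are more payloads than mgr->max_payloads (5255). A sketch of the accounting, with the per-failure debug prints at 5248-5260 omitted:

static int example_check_vcpi_alloc_limit(struct drm_dp_mst_topology_mgr *mgr,
					  struct drm_dp_mst_topology_state *mst_state)
{
	struct drm_dp_vcpi_allocation *vcpi;
	int avail_slots = 63, payload_count = 0;

	list_for_each_entry(vcpi, &mst_state->vcpis, next) {
		/* Entries with zero slots are releases and cost nothing. */
		if (!vcpi->vcpi)
			continue;

		avail_slots -= vcpi->vcpi;
		if (avail_slots < 0)
			return -ENOSPC;	/* over the 63-slot budget */

		if (++payload_count > mgr->max_payloads)
			return -EINVAL;	/* more payloads than the manager supports */
	}

	return 0;
}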
5280 struct drm_dp_mst_topology_state *mst_state; in drm_dp_mst_add_affected_dsc_crtcs() local
5287 mst_state = drm_atomic_get_mst_topology_state(state, mgr); in drm_dp_mst_add_affected_dsc_crtcs()
5289 if (IS_ERR(mst_state)) in drm_dp_mst_add_affected_dsc_crtcs()
5292 list_for_each_entry(pos, &mst_state->vcpis, next) { in drm_dp_mst_add_affected_dsc_crtcs()
5312 crtc_state = drm_atomic_get_crtc_state(mst_state->base.state, crtc); in drm_dp_mst_add_affected_dsc_crtcs()
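
drm_dp_mst_add_affected_dsc_crtcs() pulls the manager's topology state into the commit (5287) and then, for every port with a VCPI allocation, adds the CRTC driving that port to the atomic state (5312), because toggling DSC on one stream changes the bandwidth math for every stream sharing the topology. Drivers that reconfigure DSC call it from their atomic_check; a hedged driver-side sketch in which example_get_mst_mgr() is a made-up lookup helper:

#include <drm/drm_atomic_helper.h>

/* Hypothetical helper that finds the driver's topology manager. */
struct drm_dp_mst_topology_mgr *example_get_mst_mgr(struct drm_device *dev);

static int example_dsc_atomic_check(struct drm_device *dev,
				    struct drm_atomic_state *state)
{
	struct drm_dp_mst_topology_mgr *mgr = example_get_mst_mgr(dev);
	int ret;

	/* Pull in every CRTC on this topology before DSC is recomputed. */
	ret = drm_dp_mst_add_affected_dsc_crtcs(state, mgr);
	if (ret)
		return ret;

	return drm_atomic_helper_check(dev, state);
}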
5345 struct drm_dp_mst_topology_state *mst_state; in drm_dp_mst_atomic_enable_dsc() local
5350 mst_state = drm_atomic_get_mst_topology_state(state, port->mgr); in drm_dp_mst_atomic_enable_dsc()
5352 if (IS_ERR(mst_state)) in drm_dp_mst_atomic_enable_dsc()
5353 return PTR_ERR(mst_state); in drm_dp_mst_atomic_enable_dsc()
5355 list_for_each_entry(pos, &mst_state->vcpis, next) { in drm_dp_mst_atomic_enable_dsc()
5365 port, mst_state); in drm_dp_mst_atomic_enable_dsc()
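
drm_dp_mst_atomic_enable_dsc() searches the topology state's vcpis list for the given port (5355) and flags that allocation as DSC-enabled, failing if the port has no allocation in this state (5365); later bandwidth checks then work with the DSC-compressed PBN. Its parameter list has changed across kernel versions (older trees also take the PBN and the PBN divider), so the call below is only a hedged sketch:

static int example_enable_dsc(struct drm_atomic_state *state,
			      struct drm_dp_mst_port *port,
			      int pbn, int pbn_div)
{
	/* Assumed 5-argument form; newer kernels dropped pbn_div. */
	return drm_dp_mst_atomic_enable_dsc(state, port, pbn, pbn_div, true);
}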
5414 struct drm_dp_mst_topology_state *mst_state; in drm_dp_mst_atomic_check() local
5417 for_each_new_mst_mgr_in_state(state, mgr, mst_state, i) { in drm_dp_mst_atomic_check()
5418 if (!mgr->mst_state) in drm_dp_mst_atomic_check()
5421 ret = drm_dp_mst_atomic_check_vcpi_alloc_limit(mgr, mst_state); in drm_dp_mst_atomic_check()
5427 mst_state); in drm_dp_mst_atomic_check()
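
drm_dp_mst_atomic_check() is the top-level entry point: it walks every topology manager that has new state in the commit (5417), skips managers whose MST mode is disabled (5418), and runs the VCPI slot-limit check (5421) plus the per-port checks that follow (5427). Drivers call it once from their atomic_check, typically after the core helper; a minimal sketch:

static int example_atomic_check(struct drm_device *dev,
				struct drm_atomic_state *state)
{
	int ret;

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	/* Validates slot and payload budgets for every MST manager in state. */
	return drm_dp_mst_atomic_check(state);
}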
5486 struct drm_dp_mst_topology_state *mst_state; in drm_dp_mst_topology_mgr_init() local
5533 mst_state = kzalloc(sizeof(*mst_state), GFP_KERNEL); in drm_dp_mst_topology_mgr_init()
5534 if (mst_state == NULL) in drm_dp_mst_topology_mgr_init()
5537 mst_state->mgr = mgr; in drm_dp_mst_topology_mgr_init()
5538 INIT_LIST_HEAD(&mst_state->vcpis); in drm_dp_mst_topology_mgr_init()
5541 &mst_state->base, in drm_dp_mst_topology_mgr_init()
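
The last group is the tail of drm_dp_mst_topology_mgr_init(): the initial topology state is allocated (5533), tied back to the manager (5537), given an empty vcpis list (5538), and registered with the atomic framework through its base private-state member (5541). A driver calls this once per MST-capable connector; the argument order below matches older kernels and is otherwise an assumption (newer versions insert link-rate and lane-count limits before the connector id). It reuses the example_dp struct from the first sketch.

static int example_dp_mst_init(struct example_dp *dp,
			       struct drm_connector *connector)
{
	/* AUX channel, max DPCD transaction size, max payloads, base id;
	 * the exact argument list varies by kernel version (assumption). */
	return drm_dp_mst_topology_mgr_init(&dp->mst_mgr, dp->drm, &dp->aux,
					    16, 4, connector->base.id);
}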