Lines Matching refs:mst_state
3671 int drm_dp_mst_topology_mgr_set_mst(struct drm_dp_mst_topology_mgr *mgr, bool mst_state) in drm_dp_mst_topology_mgr_set_mst() argument
3678 if (mst_state == mgr->mst_state) in drm_dp_mst_topology_mgr_set_mst()
3681 mgr->mst_state = mst_state; in drm_dp_mst_topology_mgr_set_mst()
3683 if (mst_state) { in drm_dp_mst_topology_mgr_set_mst()
3786 if (mgr->mst_state && mgr->mst_primary) in drm_dp_mst_topology_mgr_suspend()
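
The two entries above are the toggle that latches the requested mst_state into the manager (drm_dp_mst_topology_mgr_set_mst(), with an early return when the requested state already matches) and the suspend path that only acts while MST is live. A minimal sketch of how a driver typically drives this lifecycle follows; the my_* wrappers are illustrative placeholders, only the drm_dp_mst_* calls come from the helper library above, and the resume counterpart drm_dp_mst_topology_mgr_resume() is left out because its signature differs between kernel versions.

#include <drm/drm_dp_mst_helper.h>	/* moved under drm/display/ in newer kernels */

/* Sketch: enable or tear down MST when the sink's capability changes. */
static int my_dp_set_mst(struct drm_dp_mst_topology_mgr *mgr, bool mst_capable)
{
	/* Returns early if mst_capable already matches mgr->mst_state
	 * (see line 3678 above). */
	return drm_dp_mst_topology_mgr_set_mst(mgr, mst_capable);
}

/* Sketch: system suspend; the helper itself performs the
 * mgr->mst_state && mgr->mst_primary check from line 3786. */
static void my_dp_suspend(struct drm_dp_mst_topology_mgr *mgr)
{
	drm_dp_mst_topology_mgr_suspend(mgr);
}
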
5010 struct drm_dp_mst_topology_state *mst_state = in drm_dp_mst_destroy_state() local
5014 list_for_each_entry_safe(pos, tmp, &mst_state->vcpis, next) { in drm_dp_mst_destroy_state()
5021 kfree(mst_state); in drm_dp_mst_destroy_state()
5139 struct drm_dp_mst_topology_state *mst_state) in drm_dp_mst_atomic_check_vcpi_alloc_limit() argument
5144 list_for_each_entry(vcpi, &mst_state->vcpis, next) { in drm_dp_mst_atomic_check_vcpi_alloc_limit()
5158 vcpi->port, mst_state, in drm_dp_mst_atomic_check_vcpi_alloc_limit()
5165 mgr, mst_state, mgr->max_payloads); in drm_dp_mst_atomic_check_vcpi_alloc_limit()
5170 mgr, mst_state, avail_slots, in drm_dp_mst_atomic_check_vcpi_alloc_limit()
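
drm_dp_mst_atomic_check_vcpi_alloc_limit() walks mst_state->vcpis and fails the commit when the allocations exceed mgr->max_payloads or the available time slots (the max_payloads and avail_slots arguments in the debug prints above). Below is a rough sketch of the driver-side step that creates those allocations, assuming the pre-5.19 "vcpis" API reflected in this listing; my_mst_encoder_atomic_check and its bpp parameter are illustrative, and drm_dp_atomic_release_vcpi_slots() would be the counterpart when the stream is torn down.

#include <drm/drm_dp_mst_helper.h>

/* Sketch: reserve VCPI time slots from the driver's atomic_check hook.
 * The allocation recorded on mst_state here is what the limit check
 * above later validates. */
static int my_mst_encoder_atomic_check(struct drm_atomic_state *state,
				       struct drm_dp_mst_topology_mgr *mgr,
				       struct drm_dp_mst_port *port,
				       const struct drm_display_mode *mode,
				       int bpp)
{
	int pbn, slots;

	pbn = drm_dp_calc_pbn_mode(mode->clock, bpp, false /* no DSC */);
	/* A pbn_div of 0 is assumed to fall back to the manager's default
	 * divider in this kernel era. */
	slots = drm_dp_atomic_find_vcpi_slots(state, mgr, port, pbn, 0);
	if (slots < 0)
		return slots;

	return 0;
}
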
5191 struct drm_dp_mst_topology_state *mst_state; in drm_dp_mst_add_affected_dsc_crtcs() local
5198 mst_state = drm_atomic_get_mst_topology_state(state, mgr); in drm_dp_mst_add_affected_dsc_crtcs()
5200 if (IS_ERR(mst_state)) in drm_dp_mst_add_affected_dsc_crtcs()
5203 list_for_each_entry(pos, &mst_state->vcpis, next) { in drm_dp_mst_add_affected_dsc_crtcs()
5223 crtc_state = drm_atomic_get_crtc_state(mst_state->base.state, crtc); in drm_dp_mst_add_affected_dsc_crtcs()
5256 struct drm_dp_mst_topology_state *mst_state; in drm_dp_mst_atomic_enable_dsc() local
5261 mst_state = drm_atomic_get_mst_topology_state(state, port->mgr); in drm_dp_mst_atomic_enable_dsc()
5263 if (IS_ERR(mst_state)) in drm_dp_mst_atomic_enable_dsc()
5264 return PTR_ERR(mst_state); in drm_dp_mst_atomic_enable_dsc()
5266 list_for_each_entry(pos, &mst_state->vcpis, next) { in drm_dp_mst_atomic_enable_dsc()
5275 port, mst_state); in drm_dp_mst_atomic_enable_dsc()
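
Both DSC helpers above fetch the topology state through drm_atomic_get_mst_topology_state() (lines 5198 and 5261) before touching the vcpis list: drm_dp_mst_add_affected_dsc_crtcs() pulls every CRTC sharing the MST link into the atomic state so its bandwidth is recomputed, and drm_dp_mst_atomic_enable_dsc() marks one port's allocation as compressed. A sketch of the order a driver would call them in; the my_* naming and the pbn/pbn_div parameters are illustrative, and the five-argument enable_dsc signature is an assumption tied to this kernel era.

#include <drm/drm_dp_mst_helper.h>

/* Sketch: typical DSC sequencing inside a driver's atomic check. */
static int my_mst_check_dsc(struct drm_atomic_state *state,
			    struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_dp_mst_port *port,
			    int pbn, int pbn_div)
{
	int ret;

	/* Re-add every CRTC on this topology so all streams are
	 * re-validated when DSC is toggled anywhere on the link. */
	ret = drm_dp_mst_add_affected_dsc_crtcs(state, mgr);
	if (ret)
		return ret;

	/* Flag this port's VCPI allocation as DSC-compressed; a negative
	 * return is an error, anything else is treated as success here. */
	ret = drm_dp_mst_atomic_enable_dsc(state, port, pbn, pbn_div, true);
	return ret < 0 ? ret : 0;
}
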
5322 struct drm_dp_mst_topology_state *mst_state; in drm_dp_mst_atomic_check() local
5325 for_each_new_mst_mgr_in_state(state, mgr, mst_state, i) { in drm_dp_mst_atomic_check()
5326 if (!mgr->mst_state) in drm_dp_mst_atomic_check()
5329 ret = drm_dp_mst_atomic_check_vcpi_alloc_limit(mgr, mst_state); in drm_dp_mst_atomic_check()
5335 mst_state); in drm_dp_mst_atomic_check()
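
drm_dp_mst_atomic_check() is the top-level entry point: it iterates every MST manager in the atomic state, skips managers whose mst_state flag is clear (line 5326), and runs the per-topology checks above. Drivers simply chain it after the core helper check; a minimal sketch with an illustrative my_atomic_check():

#include <drm/drm_atomic_helper.h>
#include <drm/drm_dp_mst_helper.h>

static int my_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	int ret;

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	/* Validates VCPI slot and payload limits for every manager
	 * that currently has mgr->mst_state set. */
	return drm_dp_mst_atomic_check(state);
}
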
5391 struct drm_dp_mst_topology_state *mst_state; in drm_dp_mst_topology_mgr_init() local
5436 mst_state = kzalloc(sizeof(*mst_state), GFP_KERNEL); in drm_dp_mst_topology_mgr_init()
5437 if (mst_state == NULL) in drm_dp_mst_topology_mgr_init()
5440 mst_state->mgr = mgr; in drm_dp_mst_topology_mgr_init()
5441 INIT_LIST_HEAD(&mst_state->vcpis); in drm_dp_mst_topology_mgr_init()
5444 &mst_state->base, in drm_dp_mst_topology_mgr_init()
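
drm_dp_mst_topology_mgr_init() allocates the initial topology state, seeds its vcpis list, and registers &mst_state->base as an atomic private object whose duplicate/destroy hooks (drm_dp_mst_destroy_state above frees the vcpi entries and the state itself) manage its lifetime across commits. The following is a generic sketch of that private-object pattern, not the actual drm_dp_mst code; all my_* names are illustrative and the deep copy of list entries is omitted.

#include <linux/list.h>
#include <linux/slab.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_state_helper.h>

/* Illustrative state mirroring the shape of mst_state. */
struct my_state {
	struct drm_private_state base;
	struct list_head vcpis;
};

#define to_my_state(s) container_of(s, struct my_state, base)

static struct drm_private_state *my_duplicate_state(struct drm_private_obj *obj)
{
	struct my_state *state = kmemdup(to_my_state(obj->state),
					 sizeof(*state), GFP_KERNEL);

	if (!state)
		return NULL;

	__drm_atomic_helper_private_obj_duplicate_state(obj, &state->base);
	INIT_LIST_HEAD(&state->vcpis);	/* copying the entries is omitted here */
	return &state->base;
}

static void my_destroy_state(struct drm_private_obj *obj,
			     struct drm_private_state *state)
{
	kfree(to_my_state(state));	/* cf. kfree(mst_state) at line 5021 */
}

static const struct drm_private_state_funcs my_state_funcs = {
	.atomic_duplicate_state = my_duplicate_state,
	.atomic_destroy_state = my_destroy_state,
};

/* Mirrors lines 5436-5444: allocate, seed the list, register the object. */
static int my_obj_init(struct drm_device *dev, struct drm_private_obj *obj)
{
	struct my_state *state = kzalloc(sizeof(*state), GFP_KERNEL);

	if (!state)
		return -ENOMEM;

	INIT_LIST_HEAD(&state->vcpis);
	drm_atomic_private_obj_init(dev, obj, &state->base, &my_state_funcs);
	return 0;
}
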