Lines Matching refs:atom

2059 struct nv50_atom *atom = nv50_atom(state); in nv50_disp_atomic_commit_tail() local
2066 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable); in nv50_disp_atomic_commit_tail()
2073 if (atom->lock_core) in nv50_disp_atomic_commit_tail()
2090 nv50_head_flush_clr(head, asyh, atom->flush_disable); in nv50_disp_atomic_commit_tail()
2105 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw); in nv50_disp_atomic_commit_tail()
2109 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2134 if (atom->flush_disable) { in nv50_disp_atomic_commit_tail()
2148 list_for_each_entry_safe(outp, outt, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2213 (!asyw->clr.mask || atom->flush_disable)) in nv50_disp_atomic_commit_tail()
2226 !atom->state.legacy_cursor_update) in nv50_disp_atomic_commit_tail()
2232 if (atom->lock_core) in nv50_disp_atomic_commit_tail()
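
A note on the commit-tail references above: atom->lock_core (2073, 2232) brackets the commit with the core channel interlock, atom->flush_disable (2090, 2105, 2134, 2213) forces the "clear" (disable) side of head and window state to be flushed before anything new is programmed, and the atom->outp list (2109, 2148) built during atomic check is walked once to disable the affected outputs and once more, with the _safe variant, to finish them and drop the per-commit entries. A minimal sketch of that shape, assuming simplified loop bodies and eliding all hardware programming:

static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	if (atom->lock_core) {
		/* take the core channel interlock for the commit (2073);
		 * dropped again near 2232 */
	}

	/*
	 * Head/window "clear" state is flushed here, with
	 * atom->flush_disable deciding whether the disables must reach
	 * the hardware immediately (2090, 2105, 2134, 2213).
	 */

	/* first walk: disable every output queued during atomic check (2109) */
	list_for_each_entry(outp, &atom->outp, head) {
		/* program the encoder off */
	}

	/* second walk: finish the outputs and drop the per-commit entries
	 * (2148); the list_del()/kfree() pair is an assumption, only the
	 * _safe iteration itself appears in the listing */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}
}
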
2345 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder) in nv50_disp_outp_atomic_add() argument
2349 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_outp_atomic_add()
2358 list_add(&outp->head, &atom->outp); in nv50_disp_outp_atomic_add()
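
Lines 2345-2358 outline a find-or-create helper: the per-commit list at atom->outp is searched for an entry matching the encoder, and a new entry is linked in with list_add() when none exists. A simplified reconstruction under that assumption (the encoder field, the allocation, and the error handling are not visible in the listing and are guesses):

static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	/* reuse an existing entry if this encoder was already queued */
	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	/* otherwise allocate a fresh entry and link it into the commit */
	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	outp->encoder = encoder;
	list_add(&outp->head, &atom->outp);
	return outp;
}
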
2364 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom, in nv50_disp_outp_atomic_check_clr() argument
2375 old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_clr()
2376 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_clr()
2378 outp = nv50_disp_outp_atomic_add(atom, encoder); in nv50_disp_outp_atomic_check_clr()
2384 atom->flush_disable = true; in nv50_disp_outp_atomic_check_clr()
2387 atom->lock_core = true; in nv50_disp_outp_atomic_check_clr()
2394 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom, in nv50_disp_outp_atomic_check_set() argument
2405 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_set()
2407 outp = nv50_disp_outp_atomic_add(atom, encoder); in nv50_disp_outp_atomic_check_set()
2412 atom->lock_core = true; in nv50_disp_outp_atomic_check_set()
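
The two check helpers mirror each other. nv50_disp_outp_atomic_check_clr() (2364-2387) inspects the old connector/CRTC state and, when an active output is being torn down or forced through a full modeset, queues its encoder via nv50_disp_outp_atomic_add() and raises flush_disable and lock_core; nv50_disp_outp_atomic_check_set() (2394-2412) does the same over the new state but only needs lock_core. A hedged sketch of the _clr side, with the exact modeset condition and the flush_disable criterion assumed:

static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector_state *old_connector_state)
{
	struct drm_encoder *encoder = old_connector_state->best_encoder;
	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
	struct drm_crtc *crtc = old_connector_state->crtc;
	struct nv50_outp_atom *outp;

	if (!crtc)
		return 0;

	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);

	/* assumed condition: the output was active and now needs a modeset */
	if (old_crtc_state->active &&
	    drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		/* the listing only shows that some disables force an early
		 * flush (2384); the exact encoder-type check is not visible */
		atom->flush_disable = true;
		atom->lock_core = true;
	}

	return 0;
}
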
2421 struct nv50_atom *atom = nv50_atom(state); in nv50_disp_atomic_check() local
2458 ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state); in nv50_disp_atomic_check()
2462 ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state); in nv50_disp_atomic_check()
2471 nv50_crc_atomic_check_outp(atom); in nv50_disp_atomic_check()
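
In nv50_disp_atomic_check() the two helpers are driven from the usual old/new connector-state loop, and the CRC bookkeeping for the queued outputs runs once the loop is done (2471). A rough sketch of just that part, assuming the standard for_each_oldnew_connector_in_state() iteration and omitting the rest of the check:

	struct drm_connector_state *old_connector_state, *new_connector_state;
	struct drm_connector *connector;
	int ret, i;

	for_each_oldnew_connector_in_state(state, connector,
					   old_connector_state,
					   new_connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
		if (ret)
			return ret;
	}

	nv50_crc_atomic_check_outp(atom);
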
2479 struct nv50_atom *atom = nv50_atom(state); in nv50_disp_atomic_state_clear() local
2482 list_for_each_entry_safe(outp, outt, &atom->outp, head) { in nv50_disp_atomic_state_clear()
2493 struct nv50_atom *atom = nv50_atom(state); in nv50_disp_atomic_state_free() local
2494 drm_atomic_state_default_release(&atom->state); in nv50_disp_atomic_state_free()
2495 kfree(atom); in nv50_disp_atomic_state_free()
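
nv50_disp_atomic_state_clear() (2479-2482) drops any output entries still queued on atom->outp (for example after a deadlock/backoff retry) before the default clear, and nv50_disp_atomic_state_free() (2493-2495) releases the embedded base state and then the wrapper itself. The free side is already complete in the listing; the clear side is reconstructed under the assumption that each entry is simply unlinked and freed:

static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	/* drop any output entries still queued on this state (assumed body) */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
}

static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);

	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}
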
2501 struct nv50_atom *atom; in nv50_disp_atomic_state_alloc() local
2502 if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) || in nv50_disp_atomic_state_alloc()
2503 drm_atomic_state_init(dev, &atom->state) < 0) { in nv50_disp_atomic_state_alloc()
2504 kfree(atom); in nv50_disp_atomic_state_alloc()
2507 INIT_LIST_HEAD(&atom->outp); in nv50_disp_atomic_state_alloc()
2508 return &atom->state; in nv50_disp_atomic_state_alloc()
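
Lines 2501-2508, taken together, are the allocator that subclasses drm_atomic_state: a zeroed nv50_atom is allocated, the embedded base state is initialised, the per-commit output list head is set up, and the address of the embedded state is returned so that nv50_atom() can later recover the wrapper (presumably via container_of()). Reassembled from the listing, with only the function signature inferred:

static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;

	/* allocate the wrapper and initialise the embedded base state */
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}

	/* per-commit list of outputs touched by this state */
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}
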