Lines Matching dp-aux-bus
31 #include <linux/dma-mapping.h>
69 #include <subdev/bios/dp.h>
83 chan->device = device; in nv50_chan_create()
94 &chan->user); in nv50_chan_create()
96 nvif_object_map(&chan->user, NULL, 0); in nv50_chan_create()
105 return -ENOSYS; in nv50_chan_create()
111 nvif_object_dtor(&chan->user); in nv50_chan_destroy()
121 nvif_object_dtor(&dmac->vram); in nv50_dmac_destroy()
122 nvif_object_dtor(&dmac->sync); in nv50_dmac_destroy()
124 nv50_chan_destroy(&dmac->base); in nv50_dmac_destroy()
126 nvif_mem_dtor(&dmac->_push.mem); in nv50_dmac_destroy()
134 dmac->cur = push->cur - (u32 *)dmac->_push.mem.object.map.ptr; in nv50_dmac_kick()
135 if (dmac->put != dmac->cur) { in nv50_dmac_kick()
139 if (dmac->push->mem.type & NVIF_MEM_VRAM) { in nv50_dmac_kick()
140 struct nvif_device *device = dmac->base.device; in nv50_dmac_kick()
141 nvif_wr32(&device->object, 0x070000, 0x00000001); in nv50_dmac_kick()
143 if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002)) in nv50_dmac_kick()
148 NVIF_WV32(&dmac->base.user, NV507C, PUT, PTR, dmac->cur); in nv50_dmac_kick()
149 dmac->put = dmac->cur; in nv50_dmac_kick()
152 push->bgn = push->cur; in nv50_dmac_kick()
158 u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR); in nv50_dmac_free()
159 if (get > dmac->cur) /* NVIDIA stays 5 away from GET; do the same. */ in nv50_dmac_free()
160 return get - dmac->cur - 5; in nv50_dmac_free()
161 return dmac->max - dmac->cur; in nv50_dmac_free()
170 u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR); in nv50_dmac_wind()
172 /* Corner-case, HW idle, but non-committed work pending. */ in nv50_dmac_wind()
173 if (dmac->put == 0) in nv50_dmac_wind()
174 nv50_dmac_kick(dmac->push); in nv50_dmac_wind()
176 if (nvif_msec(dmac->base.device, 2000, in nv50_dmac_wind()
177 if (NVIF_TV32(&dmac->base.user, NV507C, GET, PTR, >, 0)) in nv50_dmac_wind()
180 return -ETIMEDOUT; in nv50_dmac_wind()
183 PUSH_RSVD(dmac->push, PUSH_JUMP(dmac->push, 0)); in nv50_dmac_wind()
184 dmac->cur = 0; in nv50_dmac_wind()
194 if (WARN_ON(size > dmac->max)) in nv50_dmac_wait()
195 return -EINVAL; in nv50_dmac_wait()
197 dmac->cur = push->cur - (u32 *)dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
198 if (dmac->cur + size >= dmac->max) { in nv50_dmac_wait()
203 push->cur = dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
204 push->cur = push->cur + dmac->cur; in nv50_dmac_wait()
208 if (nvif_msec(dmac->base.device, 2000, in nv50_dmac_wait()
213 return -ETIMEDOUT; in nv50_dmac_wait()
216 push->bgn = dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
217 push->bgn = push->bgn + dmac->cur; in nv50_dmac_wait()
218 push->cur = push->bgn; in nv50_dmac_wait()
219 push->end = push->cur + free; in nv50_dmac_wait()
224 static int nv50_dmac_vram_pushbuf = -1;
232 struct nouveau_cli *cli = (void *)device->object.client; in nv50_dmac_create()
237 mutex_init(&dmac->lock); in nv50_dmac_create()
239 /* Pascal added support for 47-bit physical addresses, but some in nv50_dmac_create()
240 * parts of EVO still only accept 40-bit PAs. in nv50_dmac_create()
249 (nv50_dmac_vram_pushbuf < 0 && device->info.family == NV_DEVICE_INFO_V0_PASCAL)) in nv50_dmac_create()
252 ret = nvif_mem_ctor_map(&cli->mmu, "kmsChanPush", type, 0x1000, in nv50_dmac_create()
253 &dmac->_push.mem); in nv50_dmac_create()
257 dmac->ptr = dmac->_push.mem.object.map.ptr; in nv50_dmac_create()
258 dmac->_push.wait = nv50_dmac_wait; in nv50_dmac_create()
259 dmac->_push.kick = nv50_dmac_kick; in nv50_dmac_create()
260 dmac->push = &dmac->_push; in nv50_dmac_create()
261 dmac->push->bgn = dmac->_push.mem.object.map.ptr; in nv50_dmac_create()
262 dmac->push->cur = dmac->push->bgn; in nv50_dmac_create()
263 dmac->push->end = dmac->push->bgn; in nv50_dmac_create()
264 dmac->max = 0x1000/4 - 1; in nv50_dmac_create()
269 if (disp->oclass < GV100_DISP) in nv50_dmac_create()
270 dmac->max -= 12; in nv50_dmac_create()
272 args->pushbuf = nvif_handle(&dmac->_push.mem.object); in nv50_dmac_create()
275 &dmac->base); in nv50_dmac_create()
282 ret = nvif_object_ctor(&dmac->base.user, "kmsSyncCtxDma", NV50_DISP_HANDLE_SYNCBUF, in nv50_dmac_create()
290 &dmac->sync); in nv50_dmac_create()
294 ret = nvif_object_ctor(&dmac->base.user, "kmsVramCtxDma", NV50_DISP_HANDLE_VRAM, in nv50_dmac_create()
300 .limit = device->info.ram_user - 1, in nv50_dmac_create()
302 &dmac->vram); in nv50_dmac_create()
317 outp->base.base.name, outp->caps.dp_interlace); in nv50_outp_dump_caps()
323 struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev); in nv50_outp_release()
329 .base.hasht = nv_encoder->dcb->hasht, in nv50_outp_release()
330 .base.hashm = nv_encoder->dcb->hashm, in nv50_outp_release()
333 nvif_mthd(&disp->disp->object, 0, &args, sizeof(args)); in nv50_outp_release()
334 nv_encoder->or = -1; in nv50_outp_release()
335 nv_encoder->link = 0; in nv50_outp_release()
341 struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev); in nv50_outp_acquire()
342 struct nv50_disp *disp = nv50_disp(drm->dev); in nv50_outp_acquire()
349 .base.hasht = nv_encoder->dcb->hasht, in nv50_outp_acquire()
350 .base.hashm = nv_encoder->dcb->hashm, in nv50_outp_acquire()
355 ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args)); in nv50_outp_acquire()
361 nv_encoder->or = args.info.or; in nv50_outp_acquire()
362 nv_encoder->link = args.info.link; in nv50_outp_acquire()
372 struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode; in nv50_outp_atomic_check_view()
373 struct drm_display_mode *mode = &crtc_state->mode; in nv50_outp_atomic_check_view()
374 struct drm_connector *connector = conn_state->connector; in nv50_outp_atomic_check_view()
376 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_outp_atomic_check_view()
378 NV_ATOMIC(drm, "%s atomic_check\n", encoder->name); in nv50_outp_atomic_check_view()
379 asyc->scaler.full = false; in nv50_outp_atomic_check_view()
383 if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) { in nv50_outp_atomic_check_view()
384 switch (connector->connector_type) { in nv50_outp_atomic_check_view()
391 if (mode->hdisplay == native_mode->hdisplay && in nv50_outp_atomic_check_view()
392 mode->vdisplay == native_mode->vdisplay && in nv50_outp_atomic_check_view()
393 mode->type & DRM_MODE_TYPE_DRIVER) in nv50_outp_atomic_check_view()
396 asyc->scaler.full = true; in nv50_outp_atomic_check_view()
407 crtc_state->mode_changed = true; in nv50_outp_atomic_check_view()
418 struct drm_connector *connector = conn_state->connector; in nv50_outp_atomic_check()
424 nv_connector->native_mode); in nv50_outp_atomic_check()
428 if (crtc_state->mode_changed || crtc_state->connectors_changed) in nv50_outp_atomic_check()
429 asyh->or.bpc = connector->display_info.bpc; in nv50_outp_atomic_check()
443 if (connector_state->best_encoder == encoder) in nv50_outp_get_new_connector()
459 if (connector_state->best_encoder == encoder) in nv50_outp_get_old_connector()
471 const u32 mask = drm_encoder_mask(&outp->base.base); in nv50_outp_get_new_crtc()
475 if (crtc_state->encoder_mask & mask) in nv50_outp_get_new_crtc()
489 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_dac_atomic_disable()
492 core->func->dac->ctrl(core, nv_encoder->or, ctrl, NULL); in nv50_dac_atomic_disable()
493 nv_encoder->crtc = NULL; in nv50_dac_atomic_disable()
503 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_dac_atomic_enable()
504 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_dac_atomic_enable()
507 switch (nv_crtc->index) { in nv50_dac_atomic_enable()
521 core->func->dac->ctrl(core, nv_encoder->or, ctrl, asyh); in nv50_dac_atomic_enable()
522 asyh->or.depth = 0; in nv50_dac_atomic_enable()
524 nv_encoder->crtc = &nv_crtc->base; in nv50_dac_atomic_enable()
534 loadval = nouveau_drm(encoder->dev)->vbios.dactestval; in nv50_dac_detect()
538 ret = nvif_outp_load_detect(&nv_encoder->outp, loadval); in nv50_dac_detect()
558 nvif_outp_dtor(&nv_encoder->outp); in nv50_dac_destroy()
572 struct nouveau_drm *drm = nouveau_drm(connector->dev); in nv50_dac_create()
573 struct nv50_disp *disp = nv50_disp(connector->dev); in nv50_dac_create()
574 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_dac_create()
575 struct nvkm_i2c_bus *bus; in nv50_dac_create() local
582 return -ENOMEM; in nv50_dac_create()
583 nv_encoder->dcb = dcbe; in nv50_dac_create()
585 bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index); in nv50_dac_create()
586 if (bus) in nv50_dac_create()
587 nv_encoder->i2c = &bus->i2c; in nv50_dac_create()
590 encoder->possible_crtcs = dcbe->heads; in nv50_dac_create()
591 encoder->possible_clones = 0; in nv50_dac_create()
592 drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type, in nv50_dac_create()
593 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_dac_create()
597 return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp); in nv50_dac_create()
607 if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify) in nv50_audio_component_eld_notify()
608 acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr, in nv50_audio_component_eld_notify()
625 mutex_lock(&drm->audio.lock); in nv50_audio_component_get_eld()
627 drm_for_each_encoder(encoder, drm->dev) { in nv50_audio_component_get_eld()
630 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) in nv50_audio_component_get_eld()
634 nv_connector = nouveau_connector(nv_encoder->audio.connector); in nv50_audio_component_get_eld()
635 nv_crtc = nouveau_crtc(nv_encoder->crtc); in nv50_audio_component_get_eld()
637 if (!nv_crtc || nv_encoder->or != port || nv_crtc->index != dev_id) in nv50_audio_component_get_eld()
640 *enabled = nv_encoder->audio.enabled; in nv50_audio_component_get_eld()
642 ret = drm_eld_size(nv_connector->base.eld); in nv50_audio_component_get_eld()
643 memcpy(buf, nv_connector->base.eld, in nv50_audio_component_get_eld()
649 mutex_unlock(&drm->audio.lock); in nv50_audio_component_get_eld()
667 return -ENOMEM; in nv50_audio_component_bind()
670 acomp->ops = &nv50_audio_component_ops; in nv50_audio_component_bind()
671 acomp->dev = kdev; in nv50_audio_component_bind()
672 drm->audio.component = acomp; in nv50_audio_component_bind()
686 drm->audio.component = NULL; in nv50_audio_component_unbind()
687 acomp->ops = NULL; in nv50_audio_component_unbind()
688 acomp->dev = NULL; in nv50_audio_component_unbind()
700 if (component_add(drm->dev->dev, &nv50_audio_component_bind_ops)) in nv50_audio_component_init()
703 drm->audio.component_registered = true; in nv50_audio_component_init()
704 mutex_init(&drm->audio.lock); in nv50_audio_component_init()
710 if (!drm->audio.component_registered) in nv50_audio_component_fini()
713 component_del(drm->dev->dev, &nv50_audio_component_bind_ops); in nv50_audio_component_fini()
714 drm->audio.component_registered = false; in nv50_audio_component_fini()
715 mutex_destroy(&drm->audio.lock); in nv50_audio_component_fini()
724 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_audio_disable()
726 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_audio_disable()
733 .base.hasht = nv_encoder->dcb->hasht, in nv50_audio_disable()
734 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) | in nv50_audio_disable()
735 (0x0100 << nv_crtc->index), in nv50_audio_disable()
738 mutex_lock(&drm->audio.lock); in nv50_audio_disable()
739 if (nv_encoder->audio.enabled) { in nv50_audio_disable()
740 nv_encoder->audio.enabled = false; in nv50_audio_disable()
741 nv_encoder->audio.connector = NULL; in nv50_audio_disable()
742 nvif_mthd(&disp->disp->object, 0, &args, sizeof(args)); in nv50_audio_disable()
744 mutex_unlock(&drm->audio.lock); in nv50_audio_disable()
746 nv50_audio_component_eld_notify(drm->audio.component, nv_encoder->or, in nv50_audio_disable()
747 nv_crtc->index); in nv50_audio_disable()
755 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_audio_enable()
757 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_audio_enable()
763 u8 data[sizeof(nv_connector->base.eld)]; in nv50_audio_enable()
767 .base.mthd.hasht = nv_encoder->dcb->hasht, in nv50_audio_enable()
768 .base.mthd.hashm = (0xf0ff & nv_encoder->dcb->hashm) | in nv50_audio_enable()
769 (0x0100 << nv_crtc->index), in nv50_audio_enable()
772 if (!drm_detect_monitor_audio(nv_connector->edid)) in nv50_audio_enable()
775 mutex_lock(&drm->audio.lock); in nv50_audio_enable()
777 memcpy(args.data, nv_connector->base.eld, sizeof(args.data)); in nv50_audio_enable()
779 nvif_mthd(&disp->disp->object, 0, &args, in nv50_audio_enable()
781 nv_encoder->audio.enabled = true; in nv50_audio_enable()
782 nv_encoder->audio.connector = &nv_connector->base; in nv50_audio_enable()
784 mutex_unlock(&drm->audio.lock); in nv50_audio_enable()
786 nv50_audio_component_eld_notify(drm->audio.component, nv_encoder->or, in nv50_audio_enable()
787 nv_crtc->index); in nv50_audio_enable()
797 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_hdmi_disable()
804 .base.hasht = nv_encoder->dcb->hasht, in nv50_hdmi_disable()
805 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) | in nv50_hdmi_disable()
806 (0x0100 << nv_crtc->index), in nv50_hdmi_disable()
809 nvif_mthd(&disp->disp->object, 0, &args, sizeof(args)); in nv50_hdmi_disable()
817 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_hdmi_enable()
819 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_hdmi_enable()
827 .base.hasht = nv_encoder->dcb->hasht, in nv50_hdmi_enable()
828 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) | in nv50_hdmi_enable()
829 (0x0100 << nv_crtc->index), in nv50_hdmi_enable()
842 if (!drm_detect_hdmi_monitor(nv_connector->edid)) in nv50_hdmi_enable()
845 hdmi = &nv_connector->base.display_info.hdmi; in nv50_hdmi_enable()
848 &nv_connector->base, mode); in nv50_hdmi_enable()
851 &nv_connector->base, mode, in nv50_hdmi_enable()
859 &nv_connector->base, mode); in nv50_hdmi_enable()
869 max_ac_packet = mode->htotal - mode->hdisplay; in nv50_hdmi_enable()
870 max_ac_packet -= args.pwr.rekey; in nv50_hdmi_enable()
871 max_ac_packet -= 18; /* constant from tegra */ in nv50_hdmi_enable()
874 if (hdmi->scdc.scrambling.supported) { in nv50_hdmi_enable()
875 high_tmds_clock_ratio = mode->clock > 340000; in nv50_hdmi_enable()
877 hdmi->scdc.scrambling.low_rates; in nv50_hdmi_enable()
888 nvif_mthd(&disp->disp->object, 0, &args, size); in nv50_hdmi_enable()
895 if (!hdmi->scdc.scrambling.supported) in nv50_hdmi_enable()
898 ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &config); in nv50_hdmi_enable()
906 ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, config); in nv50_hdmi_enable()
942 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) in nv50_real_outp()
946 if (!msto->mstc) in nv50_real_outp()
948 return msto->mstc->mstm->outp; in nv50_real_outp()
957 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); in nv50_msto_cleanup()
959 drm_atomic_get_mst_payload_state(mst_state, msto->mstc->port); in nv50_msto_cleanup()
961 NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name); in nv50_msto_cleanup()
963 if (msto->disabled) { in nv50_msto_cleanup()
964 msto->mstc = NULL; in nv50_msto_cleanup()
965 msto->disabled = false; in nv50_msto_cleanup()
966 } else if (msto->enabled) { in nv50_msto_cleanup()
968 msto->enabled = false; in nv50_msto_cleanup()
978 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); in nv50_msto_prepare()
979 struct nv50_mstc *mstc = msto->mstc; in nv50_msto_prepare()
980 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_prepare()
988 .base.hasht = mstm->outp->dcb->hasht, in nv50_msto_prepare()
989 .base.hashm = (0xf0ff & mstm->outp->dcb->hashm) | in nv50_msto_prepare()
990 (0x0100 << msto->head->base.index), in nv50_msto_prepare()
993 NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name); in nv50_msto_prepare()
995 payload = drm_atomic_get_mst_payload_state(mst_state, mstc->port); in nv50_msto_prepare()
998 if (msto->disabled) { in nv50_msto_prepare()
1001 if (msto->enabled) in nv50_msto_prepare()
1004 args.vcpi.start_slot = payload->vc_start_slot; in nv50_msto_prepare()
1005 args.vcpi.num_slots = payload->time_slots; in nv50_msto_prepare()
1006 args.vcpi.pbn = payload->pbn; in nv50_msto_prepare()
1007 args.vcpi.aligned_pbn = payload->time_slots * mst_state->pbn_div; in nv50_msto_prepare()
1011 msto->encoder.name, msto->head->base.base.name, in nv50_msto_prepare()
1015 nvif_mthd(&drm->display->disp.object, 0, &args, sizeof(args)); in nv50_msto_prepare()
1023 struct drm_atomic_state *state = crtc_state->state; in nv50_msto_atomic_check()
1024 struct drm_connector *connector = conn_state->connector; in nv50_msto_atomic_check()
1027 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_atomic_check()
1033 mstc->native); in nv50_msto_atomic_check()
1045 if (!state->duplicated) { in nv50_msto_atomic_check()
1046 const int clock = crtc_state->adjusted_mode.clock; in nv50_msto_atomic_check()
1048 asyh->or.bpc = connector->display_info.bpc; in nv50_msto_atomic_check()
1049 asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3, in nv50_msto_atomic_check()
1053 mst_state = drm_atomic_get_mst_topology_state(state, &mstm->mgr); in nv50_msto_atomic_check()
1057 if (!mst_state->pbn_div) { in nv50_msto_atomic_check()
1058 struct nouveau_encoder *outp = mstc->mstm->outp; in nv50_msto_atomic_check()
1060 mst_state->pbn_div = drm_dp_get_vc_payload_bw(&mstm->mgr, in nv50_msto_atomic_check()
1061 outp->dp.link_bw, outp->dp.link_nr); in nv50_msto_atomic_check()
1064 slots = drm_dp_atomic_find_time_slots(state, &mstm->mgr, mstc->port, asyh->dp.pbn); in nv50_msto_atomic_check()
1068 asyh->dp.tu = slots; in nv50_msto_atomic_check()
1088 struct nv50_head *head = msto->head; in nv50_msto_atomic_enable()
1090 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &head->base.base)); in nv50_msto_atomic_enable()
1097 drm_connector_list_iter_begin(encoder->dev, &conn_iter); in nv50_msto_atomic_enable()
1099 if (connector->state->best_encoder == &msto->encoder) { in nv50_msto_atomic_enable()
1101 mstm = mstc->mstm; in nv50_msto_atomic_enable()
1110 if (!mstm->links++) in nv50_msto_atomic_enable()
1111 nv50_outp_acquire(mstm->outp, false /*XXX: MST audio.*/); in nv50_msto_atomic_enable()
1113 if (mstm->outp->link & 1) in nv50_msto_atomic_enable()
1118 mstm->outp->update(mstm->outp, head->base.index, asyh, proto, in nv50_msto_atomic_enable()
1119 nv50_dp_bpc_to_depth(asyh->or.bpc)); in nv50_msto_atomic_enable()
1121 msto->mstc = mstc; in nv50_msto_atomic_enable()
1122 msto->enabled = true; in nv50_msto_atomic_enable()
1123 mstm->modified = true; in nv50_msto_atomic_enable()
1130 struct nv50_mstc *mstc = msto->mstc; in nv50_msto_atomic_disable()
1131 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_atomic_disable()
1133 mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0); in nv50_msto_atomic_disable()
1134 mstm->modified = true; in nv50_msto_atomic_disable()
1135 if (!--mstm->links) in nv50_msto_atomic_disable()
1136 mstm->disabled = true; in nv50_msto_atomic_disable()
1137 msto->disabled = true; in nv50_msto_atomic_disable()
1151 drm_encoder_cleanup(&msto->encoder); in nv50_msto_destroy()
1168 return ERR_PTR(-ENOMEM); in nv50_msto_new()
1170 ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto, in nv50_msto_new()
1171 DRM_MODE_ENCODER_DPMST, "mst-%d", id); in nv50_msto_new()
1177 drm_encoder_helper_add(&msto->encoder, &nv50_msto_help); in nv50_msto_new()
1178 msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base); in nv50_msto_new()
1179 msto->head = head; in nv50_msto_new()
1190 struct drm_crtc *crtc = connector_state->crtc; in nv50_mstc_atomic_best_encoder()
1192 if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc))) in nv50_mstc_atomic_best_encoder()
1195 return &nv50_head(crtc)->msto->encoder; in nv50_mstc_atomic_best_encoder()
1203 struct nouveau_encoder *outp = mstc->mstm->outp; in nv50_mstc_mode_valid()
1218 mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port); in nv50_mstc_get_modes()
1219 drm_connector_update_edid_property(&mstc->connector, mstc->edid); in nv50_mstc_get_modes()
1220 if (mstc->edid) in nv50_mstc_get_modes()
1221 ret = drm_add_edid_modes(&mstc->connector, mstc->edid); in nv50_mstc_get_modes()
1229 if (connector->display_info.bpc) in nv50_mstc_get_modes()
1230 connector->display_info.bpc = in nv50_mstc_get_modes()
1231 clamp(connector->display_info.bpc, 6U, 8U); in nv50_mstc_get_modes()
1233 connector->display_info.bpc = 8; in nv50_mstc_get_modes()
1235 if (mstc->native) in nv50_mstc_get_modes()
1236 drm_mode_destroy(mstc->connector.dev, mstc->native); in nv50_mstc_get_modes()
1237 mstc->native = nouveau_conn_native_mode(&mstc->connector); in nv50_mstc_get_modes()
1246 struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr; in nv50_mstc_atomic_check()
1248 return drm_dp_atomic_release_time_slots(state, mgr, mstc->port); in nv50_mstc_atomic_check()
1261 ret = pm_runtime_get_sync(connector->dev->dev); in nv50_mstc_detect()
1262 if (ret < 0 && ret != -EACCES) { in nv50_mstc_detect()
1263 pm_runtime_put_autosuspend(connector->dev->dev); in nv50_mstc_detect()
1267 ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr, in nv50_mstc_detect()
1268 mstc->port); in nv50_mstc_detect()
1273 pm_runtime_mark_last_busy(connector->dev->dev); in nv50_mstc_detect()
1274 pm_runtime_put_autosuspend(connector->dev->dev); in nv50_mstc_detect()
1292 drm_connector_cleanup(&mstc->connector); in nv50_mstc_destroy()
1293 drm_dp_mst_put_port_malloc(mstc->port); in nv50_mstc_destroy()
1313 struct drm_device *dev = mstm->outp->base.base.dev; in nv50_mstc_new()
1319 return -ENOMEM; in nv50_mstc_new()
1320 mstc->mstm = mstm; in nv50_mstc_new()
1321 mstc->port = port; in nv50_mstc_new()
1323 ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc, in nv50_mstc_new()
1331 drm_connector_helper_add(&mstc->connector, &nv50_mstc_help); in nv50_mstc_new()
1333 mstc->connector.funcs->reset(&mstc->connector); in nv50_mstc_new()
1334 nouveau_conn_attach_properties(&mstc->connector); in nv50_mstc_new()
1337 if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc))) in nv50_mstc_new()
1340 drm_connector_attach_encoder(&mstc->connector, in nv50_mstc_new()
1341 &nv50_head(crtc)->msto->encoder); in nv50_mstc_new()
1344 drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0); in nv50_mstc_new()
1345 drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0); in nv50_mstc_new()
1346 drm_connector_set_path_property(&mstc->connector, path); in nv50_mstc_new()
1356 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); in nv50_mstm_cleanup()
1359 NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name); in nv50_mstm_cleanup()
1360 drm_dp_check_act_status(&mstm->mgr); in nv50_mstm_cleanup()
1362 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_cleanup()
1363 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_cleanup()
1365 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_cleanup()
1366 if (mstc && mstc->mstm == mstm) in nv50_mstm_cleanup()
1367 nv50_msto_cleanup(state, mst_state, &mstm->mgr, msto); in nv50_mstm_cleanup()
1371 mstm->modified = false; in nv50_mstm_cleanup()
1379 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); in nv50_mstm_prepare()
1382 NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name); in nv50_mstm_prepare()
1385 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_prepare()
1386 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_prepare()
1388 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_prepare()
1389 if (mstc && mstc->mstm == mstm && msto->disabled) in nv50_mstm_prepare()
1390 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto); in nv50_mstm_prepare()
1397 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_prepare()
1398 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_prepare()
1400 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_prepare()
1401 if (mstc && mstc->mstm == mstm && !msto->disabled) in nv50_mstm_prepare()
1402 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto); in nv50_mstm_prepare()
1406 if (mstm->disabled) { in nv50_mstm_prepare()
1407 if (!mstm->links) in nv50_mstm_prepare()
1408 nv50_outp_release(mstm->outp); in nv50_mstm_prepare()
1409 mstm->disabled = false; in nv50_mstm_prepare()
1425 return &mstc->connector; in nv50_mstm_add_connector()
1438 struct drm_dp_aux *aux = &nv_connector->aux; in nv50_mstm_service() local
1444 rc = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8); in nv50_mstm_service()
1450 drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled); in nv50_mstm_service()
1454 rc = drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], in nv50_mstm_service()
1464 nv_connector->base.name, rc); in nv50_mstm_service()
1472 mstm->is_mst = false; in nv50_mstm_remove()
1473 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false); in nv50_mstm_remove()
1479 struct nouveau_encoder *outp = mstm->outp; in nv50_mstm_enable()
1486 .base.hasht = outp->dcb->hasht, in nv50_mstm_enable()
1487 .base.hashm = outp->dcb->hashm, in nv50_mstm_enable()
1490 struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev); in nv50_mstm_enable()
1491 struct nvif_object *disp = &drm->display->disp.object; in nv50_mstm_enable()
1499 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_detect()
1500 struct drm_dp_aux *aux; in nv50_mstm_detect() local
1503 if (!mstm || !mstm->can_mst) in nv50_mstm_detect()
1506 aux = mstm->mgr.aux; in nv50_mstm_detect()
1511 ret = drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0); in nv50_mstm_detect()
1520 ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, true); in nv50_mstm_detect()
1526 mstm->is_mst = true; in nv50_mstm_detect()
1533 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_fini()
1540 * path to protect mstm->is_mst without potentially deadlocking in nv50_mstm_fini()
1542 mutex_lock(&outp->dp.hpd_irq_lock); in nv50_mstm_fini()
1543 mstm->suspended = true; in nv50_mstm_fini()
1544 mutex_unlock(&outp->dp.hpd_irq_lock); in nv50_mstm_fini()
1546 if (mstm->is_mst) in nv50_mstm_fini()
1547 drm_dp_mst_topology_mgr_suspend(&mstm->mgr); in nv50_mstm_fini()
1553 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_init()
1559 if (mstm->is_mst) { in nv50_mstm_init()
1560 ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime); in nv50_mstm_init()
1561 if (ret == -1) in nv50_mstm_init()
1565 mutex_lock(&outp->dp.hpd_irq_lock); in nv50_mstm_init()
1566 mstm->suspended = false; in nv50_mstm_init()
1567 mutex_unlock(&outp->dp.hpd_irq_lock); in nv50_mstm_init()
1569 if (ret == -1) in nv50_mstm_init()
1570 drm_kms_helper_hotplug_event(mstm->mgr.dev); in nv50_mstm_init()
1578 drm_dp_mst_topology_mgr_destroy(&mstm->mgr); in nv50_mstm_del()
1585 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max, in nv50_mstm_new() argument
1588 const int max_payloads = hweight8(outp->dcb->heads); in nv50_mstm_new()
1589 struct drm_device *dev = outp->base.base.dev; in nv50_mstm_new()
1594 return -ENOMEM; in nv50_mstm_new()
1595 mstm->outp = outp; in nv50_mstm_new()
1596 mstm->mgr.cbs = &nv50_mstm; in nv50_mstm_new()
1598 ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max, in nv50_mstm_new()
1613 struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev); in nv50_sor_update()
1614 struct nv50_core *core = disp->core; in nv50_sor_update()
1617 nv_encoder->ctrl &= ~BIT(head); in nv50_sor_update()
1618 if (NVDEF_TEST(nv_encoder->ctrl, NV507D, SOR_SET_CONTROL, OWNER, ==, NONE)) in nv50_sor_update()
1619 nv_encoder->ctrl = 0; in nv50_sor_update()
1621 nv_encoder->ctrl |= NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto); in nv50_sor_update()
1622 nv_encoder->ctrl |= BIT(head); in nv50_sor_update()
1623 asyh->or.depth = depth; in nv50_sor_update()
1626 core->func->sor->ctrl(core, nv_encoder->or, nv_encoder->ctrl, asyh); in nv50_sor_update()
1629 /* TODO: Should we extend this to PWM-only backlights?
1638 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc); in nv50_sor_atomic_disable()
1641 struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev); in nv50_sor_atomic_disable()
1642 struct nouveau_backlight *backlight = nv_connector->backlight; in nv50_sor_atomic_disable()
1644 struct drm_dp_aux *aux = &nv_connector->aux; in nv50_sor_atomic_disable() local
1649 if (backlight && backlight->uses_dpcd) { in nv50_sor_atomic_disable()
1650 ret = drm_edp_backlight_disable(aux, &backlight->edp_info); in nv50_sor_atomic_disable()
1653 nv_connector->base.base.id, nv_connector->base.name, ret); in nv50_sor_atomic_disable()
1657 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) { in nv50_sor_atomic_disable()
1658 ret = drm_dp_dpcd_readb(aux, DP_SET_POWER, &pwr); in nv50_sor_atomic_disable()
1663 drm_dp_dpcd_writeb(aux, DP_SET_POWER, pwr); in nv50_sor_atomic_disable()
1667 nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0); in nv50_sor_atomic_disable()
1669 nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc); in nv50_sor_atomic_disable()
1671 nv_encoder->crtc = NULL; in nv50_sor_atomic_disable()
1680 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_sor_atomic_enable()
1681 struct drm_display_mode *mode = &asyh->state.adjusted_mode; in nv50_sor_atomic_enable()
1688 .base.hasht = nv_encoder->dcb->hasht, in nv50_sor_atomic_enable()
1689 .base.hashm = nv_encoder->dcb->hashm, in nv50_sor_atomic_enable()
1691 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_sor_atomic_enable()
1692 struct drm_device *dev = encoder->dev; in nv50_sor_atomic_enable()
1698 struct nvbios *bios = &drm->vbios; in nv50_sor_atomic_enable()
1704 nv_encoder->crtc = &nv_crtc->base; in nv50_sor_atomic_enable()
1706 if ((disp->disp->object.oclass == GT214_DISP || in nv50_sor_atomic_enable()
1707 disp->disp->object.oclass >= GF110_DISP) && in nv50_sor_atomic_enable()
1708 drm_detect_monitor_audio(nv_connector->edid)) in nv50_sor_atomic_enable()
1712 switch (nv_encoder->dcb->type) { in nv50_sor_atomic_enable()
1714 if (nv_encoder->link & 1) { in nv50_sor_atomic_enable()
1716 /* Only enable dual-link if: in nv50_sor_atomic_enable()
1717 * - Need to (i.e. rate > 165MHz) in nv50_sor_atomic_enable()
1718 * - DCB says we can in nv50_sor_atomic_enable()
1719 * - Not an HDMI monitor, since there's no dual-link in nv50_sor_atomic_enable()
1722 if (mode->clock >= 165000 && in nv50_sor_atomic_enable()
1723 nv_encoder->dcb->duallink_possible && in nv50_sor_atomic_enable()
1724 !drm_detect_hdmi_monitor(nv_connector->edid)) in nv50_sor_atomic_enable()
1730 nv50_hdmi_enable(&nv_encoder->base.base, nv_crtc, nv_connector, state, mode); in nv50_sor_atomic_enable()
1735 if (bios->fp_no_ddc) { in nv50_sor_atomic_enable()
1736 if (bios->fp.dual_link) in nv50_sor_atomic_enable()
1738 if (bios->fp.if_is_24bit) in nv50_sor_atomic_enable()
1741 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) { in nv50_sor_atomic_enable()
1742 if (((u8 *)nv_connector->edid)[121] == 2) in nv50_sor_atomic_enable()
1745 if (mode->clock >= bios->fp.duallink_transition_clk) { in nv50_sor_atomic_enable()
1750 if (bios->fp.strapless_is_24bit & 2) in nv50_sor_atomic_enable()
1753 if (bios->fp.strapless_is_24bit & 1) in nv50_sor_atomic_enable()
1757 if (asyh->or.bpc == 8) in nv50_sor_atomic_enable()
1761 nvif_mthd(&disp->disp->object, 0, &lvds, sizeof(lvds)); in nv50_sor_atomic_enable()
1764 depth = nv50_dp_bpc_to_depth(asyh->or.bpc); in nv50_sor_atomic_enable()
1766 if (nv_encoder->link & 1) in nv50_sor_atomic_enable()
1774 backlight = nv_connector->backlight; in nv50_sor_atomic_enable()
1775 if (backlight && backlight->uses_dpcd) in nv50_sor_atomic_enable()
1776 drm_edp_backlight_enable(&nv_connector->aux, &backlight->edp_info, in nv50_sor_atomic_enable()
1777 (u16)backlight->dev->props.brightness); in nv50_sor_atomic_enable()
1786 nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth); in nv50_sor_atomic_enable()
1801 nvif_outp_dtor(&nv_encoder->outp); in nv50_sor_destroy()
1803 nv50_mstm_del(&nv_encoder->dp.mstm); in nv50_sor_destroy()
1806 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) in nv50_sor_destroy()
1807 mutex_destroy(&nv_encoder->dp.hpd_irq_lock); in nv50_sor_destroy()
1819 struct nvkm_bios *bios = nvxx_bios(&drm->client.device); in nv50_has_mst()
1831 struct nouveau_drm *drm = nouveau_drm(connector->dev); in nv50_sor_create()
1832 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_sor_create()
1835 struct nv50_disp *disp = nv50_disp(connector->dev); in nv50_sor_create()
1838 switch (dcbe->type) { in nv50_sor_create()
1849 return -ENOMEM; in nv50_sor_create()
1850 nv_encoder->dcb = dcbe; in nv50_sor_create()
1851 nv_encoder->update = nv50_sor_update; in nv50_sor_create()
1854 encoder->possible_crtcs = dcbe->heads; in nv50_sor_create()
1855 encoder->possible_clones = 0; in nv50_sor_create()
1856 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type, in nv50_sor_create()
1857 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_sor_create()
1862 disp->core->func->sor->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1); in nv50_sor_create()
1865 if (dcbe->type == DCB_OUTPUT_DP) { in nv50_sor_create()
1866 struct nvkm_i2c_aux *aux = in nv50_sor_create() local
1867 nvkm_i2c_aux_find(i2c, dcbe->i2c_index); in nv50_sor_create()
1869 mutex_init(&nv_encoder->dp.hpd_irq_lock); in nv50_sor_create()
1871 if (aux) { in nv50_sor_create()
1872 if (disp->disp->object.oclass < GF110_DISP) { in nv50_sor_create()
1873 /* HW has no support for address-only in nv50_sor_create()
1875 * use custom I2C-over-AUX code. in nv50_sor_create()
1877 nv_encoder->i2c = &aux->i2c; in nv50_sor_create()
1879 nv_encoder->i2c = &nv_connector->aux.ddc; in nv50_sor_create()
1881 nv_encoder->aux = aux; in nv50_sor_create()
1884 if (nv_connector->type != DCB_CONNECTOR_eDP && in nv50_sor_create()
1886 ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, in nv50_sor_create()
1887 16, nv_connector->base.base.id, in nv50_sor_create()
1888 &nv_encoder->dp.mstm); in nv50_sor_create()
1893 struct nvkm_i2c_bus *bus = in nv50_sor_create() local
1894 nvkm_i2c_bus_find(i2c, dcbe->i2c_index); in nv50_sor_create()
1895 if (bus) in nv50_sor_create()
1896 nv_encoder->i2c = &bus->i2c; in nv50_sor_create()
1899 return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp); in nv50_sor_create()
1913 crtc_state->adjusted_mode.clock *= 2; in nv50_pior_atomic_check()
1921 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_pior_atomic_disable()
1924 core->func->pior->ctrl(core, nv_encoder->or, ctrl, NULL); in nv50_pior_atomic_disable()
1925 nv_encoder->crtc = NULL; in nv50_pior_atomic_disable()
1935 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_pior_atomic_enable()
1936 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_pior_atomic_enable()
1939 switch (nv_crtc->index) { in nv50_pior_atomic_enable()
1949 switch (asyh->or.bpc) { in nv50_pior_atomic_enable()
1950 case 10: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444; break; in nv50_pior_atomic_enable()
1951 case 8: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444; break; in nv50_pior_atomic_enable()
1952 case 6: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444; break; in nv50_pior_atomic_enable()
1953 default: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT; break; in nv50_pior_atomic_enable()
1956 switch (nv_encoder->dcb->type) { in nv50_pior_atomic_enable()
1966 core->func->pior->ctrl(core, nv_encoder->or, ctrl, asyh); in nv50_pior_atomic_enable()
1967 nv_encoder->crtc = &nv_crtc->base; in nv50_pior_atomic_enable()
1982 nvif_outp_dtor(&nv_encoder->outp); in nv50_pior_destroy()
1996 struct drm_device *dev = connector->dev; in nv50_pior_create()
1999 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_pior_create()
2000 struct nvkm_i2c_bus *bus = NULL; in nv50_pior_create() local
2001 struct nvkm_i2c_aux *aux = NULL; in nv50_pior_create() local
2007 switch (dcbe->type) { in nv50_pior_create()
2009 bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev)); in nv50_pior_create()
2010 ddc = bus ? &bus->i2c : NULL; in nv50_pior_create()
2014 aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev)); in nv50_pior_create()
2015 ddc = aux ? &aux->i2c : NULL; in nv50_pior_create()
2019 return -ENODEV; in nv50_pior_create()
2024 return -ENOMEM; in nv50_pior_create()
2025 nv_encoder->dcb = dcbe; in nv50_pior_create()
2026 nv_encoder->i2c = ddc; in nv50_pior_create()
2027 nv_encoder->aux = aux; in nv50_pior_create()
2030 encoder->possible_crtcs = dcbe->heads; in nv50_pior_create()
2031 encoder->possible_clones = 0; in nv50_pior_create()
2032 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type, in nv50_pior_create()
2033 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_pior_create()
2038 disp->core->func->pior->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1); in nv50_pior_create()
2041 return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp); in nv50_pior_create()
2053 struct nouveau_drm *drm = nouveau_drm(state->dev); in nv50_disp_atomic_commit_core()
2054 struct nv50_disp *disp = nv50_disp(drm->dev); in nv50_disp_atomic_commit_core()
2055 struct nv50_core *core = disp->core; in nv50_disp_atomic_commit_core()
2063 if (mstm->modified) in nv50_disp_atomic_commit_core()
2067 core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY); in nv50_disp_atomic_commit_core()
2068 core->func->update(core, interlock, true); in nv50_disp_atomic_commit_core()
2069 if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY, in nv50_disp_atomic_commit_core()
2070 disp->core->chan.base.device)) in nv50_disp_atomic_commit_core()
2075 if (mstm->modified) in nv50_disp_atomic_commit_core()
2089 if (interlock[wndw->interlock.type] & wndw->interlock.data) { in nv50_disp_atomic_commit_wndw()
2090 if (wndw->func->update) in nv50_disp_atomic_commit_wndw()
2091 wndw->func->update(wndw, interlock); in nv50_disp_atomic_commit_wndw()
2099 struct drm_device *dev = state->dev; in nv50_disp_atomic_commit_tail()
2107 struct nv50_core *core = disp->core; in nv50_disp_atomic_commit_tail()
2113 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable); in nv50_disp_atomic_commit_tail()
2121 if (atom->lock_core) in nv50_disp_atomic_commit_tail()
2122 mutex_lock(&disp->mutex); in nv50_disp_atomic_commit_tail()
2129 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2130 asyh->clr.mask, asyh->set.mask); in nv50_disp_atomic_commit_tail()
2132 if (old_crtc_state->active && !new_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2133 pm_runtime_put_noidle(dev->dev); in nv50_disp_atomic_commit_tail()
2137 if (asyh->clr.mask) { in nv50_disp_atomic_commit_tail()
2138 nv50_head_flush_clr(head, asyh, atom->flush_disable); in nv50_disp_atomic_commit_tail()
2148 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name, in nv50_disp_atomic_commit_tail()
2149 asyw->clr.mask, asyw->set.mask); in nv50_disp_atomic_commit_tail()
2150 if (!asyw->clr.mask) in nv50_disp_atomic_commit_tail()
2153 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw); in nv50_disp_atomic_commit_tail()
2157 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2161 encoder = outp->encoder; in nv50_disp_atomic_commit_tail()
2162 help = encoder->helper_private; in nv50_disp_atomic_commit_tail()
2164 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name, in nv50_disp_atomic_commit_tail()
2165 outp->clr.mask, outp->set.mask); in nv50_disp_atomic_commit_tail()
2167 if (outp->clr.mask) { in nv50_disp_atomic_commit_tail()
2168 help->atomic_disable(encoder, state); in nv50_disp_atomic_commit_tail()
2170 if (outp->flush_disable) { in nv50_disp_atomic_commit_tail()
2182 if (atom->flush_disable) { in nv50_disp_atomic_commit_tail()
2196 list_for_each_entry_safe(outp, outt, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2200 encoder = outp->encoder; in nv50_disp_atomic_commit_tail()
2201 help = encoder->helper_private; in nv50_disp_atomic_commit_tail()
2203 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name, in nv50_disp_atomic_commit_tail()
2204 outp->set.mask, outp->clr.mask); in nv50_disp_atomic_commit_tail()
2206 if (outp->set.mask) { in nv50_disp_atomic_commit_tail()
2207 help->atomic_enable(encoder, state); in nv50_disp_atomic_commit_tail()
2211 list_del(&outp->head); in nv50_disp_atomic_commit_tail()
2220 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2221 asyh->set.mask, asyh->clr.mask); in nv50_disp_atomic_commit_tail()
2223 if (asyh->set.mask) { in nv50_disp_atomic_commit_tail()
2228 if (new_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2229 if (!old_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2231 pm_runtime_get_noresume(dev->dev); in nv50_disp_atomic_commit_tail()
2233 if (new_crtc_state->event) in nv50_disp_atomic_commit_tail()
2238 /* Update window->head assignment. in nv50_disp_atomic_commit_tail()
2244 * supports non-fixed mappings). in nv50_disp_atomic_commit_tail()
2246 if (core->assign_windows) { in nv50_disp_atomic_commit_tail()
2247 core->func->wndw.owner(core); in nv50_disp_atomic_commit_tail()
2249 core->assign_windows = false; in nv50_disp_atomic_commit_tail()
2271 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2272 asyh->set.mask, asyh->clr.mask); in nv50_disp_atomic_commit_tail()
2274 if (asyh->set.mask) { in nv50_disp_atomic_commit_tail()
2285 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name, in nv50_disp_atomic_commit_tail()
2286 asyw->set.mask, asyw->clr.mask); in nv50_disp_atomic_commit_tail()
2287 if ( !asyw->set.mask && in nv50_disp_atomic_commit_tail()
2288 (!asyw->clr.mask || atom->flush_disable)) in nv50_disp_atomic_commit_tail()
2301 !atom->state.legacy_cursor_update) in nv50_disp_atomic_commit_tail()
2304 disp->core->func->update(disp->core, interlock, false); in nv50_disp_atomic_commit_tail()
2307 if (atom->lock_core) in nv50_disp_atomic_commit_tail()
2308 mutex_unlock(&disp->mutex); in nv50_disp_atomic_commit_tail()
2316 NV_ERROR(drm, "%s: timeout\n", plane->name); in nv50_disp_atomic_commit_tail()
2320 if (new_crtc_state->event) { in nv50_disp_atomic_commit_tail()
2323 if (new_crtc_state->active) in nv50_disp_atomic_commit_tail()
2325 spin_lock_irqsave(&crtc->dev->event_lock, flags); in nv50_disp_atomic_commit_tail()
2326 drm_crtc_send_vblank_event(crtc, new_crtc_state->event); in nv50_disp_atomic_commit_tail()
2327 spin_unlock_irqrestore(&crtc->dev->event_lock, flags); in nv50_disp_atomic_commit_tail()
2329 new_crtc_state->event = NULL; in nv50_disp_atomic_commit_tail()
2330 if (new_crtc_state->active) in nv50_disp_atomic_commit_tail()
2345 pm_runtime_mark_last_busy(dev->dev); in nv50_disp_atomic_commit_tail()
2346 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit_tail()
2365 ret = pm_runtime_get_sync(dev->dev); in nv50_disp_atomic_commit()
2366 if (ret < 0 && ret != -EACCES) { in nv50_disp_atomic_commit()
2367 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit()
2375 INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work); in nv50_disp_atomic_commit()
2395 if (asyw->set.image) in nv50_disp_atomic_commit()
2405 pm_runtime_get_noresume(dev->dev); in nv50_disp_atomic_commit()
2408 queue_work(system_unbound_wq, &state->commit_work); in nv50_disp_atomic_commit()
2416 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit()
2425 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_outp_atomic_add()
2426 if (outp->encoder == encoder) in nv50_disp_outp_atomic_add()
2432 return ERR_PTR(-ENOMEM); in nv50_disp_outp_atomic_add()
2434 list_add(&outp->head, &atom->outp); in nv50_disp_outp_atomic_add()
2435 outp->encoder = encoder; in nv50_disp_outp_atomic_add()
2443 struct drm_encoder *encoder = old_connector_state->best_encoder; in nv50_disp_outp_atomic_check_clr()
2448 if (!(crtc = old_connector_state->crtc)) in nv50_disp_outp_atomic_check_clr()
2451 old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_clr()
2452 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_clr()
2453 if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) { in nv50_disp_outp_atomic_check_clr()
2458 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_disp_outp_atomic_check_clr()
2459 outp->flush_disable = true; in nv50_disp_outp_atomic_check_clr()
2460 atom->flush_disable = true; in nv50_disp_outp_atomic_check_clr()
2462 outp->clr.ctrl = true; in nv50_disp_outp_atomic_check_clr()
2463 atom->lock_core = true; in nv50_disp_outp_atomic_check_clr()
2473 struct drm_encoder *encoder = connector_state->best_encoder; in nv50_disp_outp_atomic_check_set()
2478 if (!(crtc = connector_state->crtc)) in nv50_disp_outp_atomic_check_set()
2481 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_set()
2482 if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) { in nv50_disp_outp_atomic_check_set()
2487 outp->set.ctrl = true; in nv50_disp_outp_atomic_check_set()
2488 atom->lock_core = true; in nv50_disp_outp_atomic_check_set()
2498 struct nv50_core *core = nv50_disp(dev)->core; in nv50_disp_atomic_check()
2507 if (core->assign_windows && core->func->head->static_wndw_map) { in nv50_disp_atomic_check()
2516 core->func->head->static_wndw_map(head, asyh); in nv50_disp_atomic_check()
2520 /* We need to handle colour management on a per-plane basis. */ in nv50_disp_atomic_check()
2522 if (new_crtc_state->color_mgmt_changed) { in nv50_disp_atomic_check()
2558 list_for_each_entry_safe(outp, outt, &atom->outp, head) { in nv50_disp_atomic_state_clear()
2559 list_del(&outp->head); in nv50_disp_atomic_state_clear()
2570 drm_atomic_state_default_release(&atom->state); in nv50_disp_atomic_state_free()
2579 drm_atomic_state_init(dev, &atom->state) < 0) { in nv50_disp_atomic_state_alloc()
2583 INIT_LIST_HEAD(&atom->outp); in nv50_disp_atomic_state_alloc()
2584 return &atom->state; in nv50_disp_atomic_state_alloc()
2613 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in nv50_display_fini()
2614 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) in nv50_display_fini()
2619 cancel_work_sync(&drm->hpd_work); in nv50_display_fini()
2625 struct nv50_core *core = nv50_disp(dev)->core; in nv50_display_init()
2629 core->func->init(core); in nv50_display_init()
2631 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in nv50_display_init()
2632 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { in nv50_display_init()
2649 nvif_object_unmap(&disp->caps); in nv50_display_destroy()
2650 nvif_object_dtor(&disp->caps); in nv50_display_destroy()
2651 nv50_core_del(&disp->core); in nv50_display_destroy()
2653 nouveau_bo_unmap(disp->sync); in nv50_display_destroy()
2654 if (disp->sync) in nv50_display_destroy()
2655 nouveau_bo_unpin(disp->sync); in nv50_display_destroy()
2656 nouveau_bo_ref(NULL, &disp->sync); in nv50_display_destroy()
2658 nouveau_display(dev)->priv = NULL; in nv50_display_destroy()
2665 struct nvif_device *device = &nouveau_drm(dev)->client.device; in nv50_display_create()
2667 struct dcb_table *dcb = &drm->vbios.dcb; in nv50_display_create()
2676 return -ENOMEM; in nv50_display_create()
2678 mutex_init(&disp->mutex); in nv50_display_create()
2680 nouveau_display(dev)->priv = disp; in nv50_display_create()
2681 nouveau_display(dev)->dtor = nv50_display_destroy; in nv50_display_create()
2682 nouveau_display(dev)->init = nv50_display_init; in nv50_display_create()
2683 nouveau_display(dev)->fini = nv50_display_fini; in nv50_display_create()
2684 disp->disp = &nouveau_display(dev)->disp; in nv50_display_create()
2685 dev->mode_config.funcs = &nv50_disp_func; in nv50_display_create()
2686 dev->mode_config.helper_private = &nv50_disp_helper_func; in nv50_display_create()
2687 dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true; in nv50_display_create()
2688 dev->mode_config.normalize_zpos = true; in nv50_display_create()
2691 ret = nouveau_bo_new(&drm->client, 4096, 0x1000, in nv50_display_create()
2693 0, 0x0000, NULL, NULL, &disp->sync); in nv50_display_create()
2695 ret = nouveau_bo_pin(disp->sync, NOUVEAU_GEM_DOMAIN_VRAM, true); in nv50_display_create()
2697 ret = nouveau_bo_map(disp->sync); in nv50_display_create()
2699 nouveau_bo_unpin(disp->sync); in nv50_display_create()
2702 nouveau_bo_ref(NULL, &disp->sync); in nv50_display_create()
2709 ret = nv50_core_new(drm, &disp->core); in nv50_display_create()
2713 disp->core->func->init(disp->core); in nv50_display_create()
2714 if (disp->core->func->caps_init) { in nv50_display_create()
2715 ret = disp->core->func->caps_init(drm, disp); in nv50_display_create()
2721 if (disp->disp->object.oclass >= TU102_DISP) in nv50_display_create()
2722 nouveau_display(dev)->format_modifiers = wndwc57e_modifiers; in nv50_display_create()
2724 if (drm->client.device.info.family >= NV_DEVICE_INFO_V0_FERMI) in nv50_display_create()
2725 nouveau_display(dev)->format_modifiers = disp90xx_modifiers; in nv50_display_create()
2727 nouveau_display(dev)->format_modifiers = disp50xx_modifiers; in nv50_display_create()
2734 * But until then, just limit cursors to 128x128 - which is small enough to avoid ever using in nv50_display_create()
2737 if (disp->disp->object.oclass >= GM107_DISP) { in nv50_display_create()
2738 dev->mode_config.cursor_width = 256; in nv50_display_create()
2739 dev->mode_config.cursor_height = 256; in nv50_display_create()
2740 } else if (disp->disp->object.oclass >= GK104_DISP) { in nv50_display_create()
2741 dev->mode_config.cursor_width = 128; in nv50_display_create()
2742 dev->mode_config.cursor_height = 128; in nv50_display_create()
2744 dev->mode_config.cursor_width = 64; in nv50_display_create()
2745 dev->mode_config.cursor_height = 64; in nv50_display_create()
2749 if (disp->disp->object.oclass >= GV100_DISP) in nv50_display_create()
2750 crtcs = nvif_rd32(&device->object, 0x610060) & 0xff; in nv50_display_create()
2752 if (disp->disp->object.oclass >= GF110_DISP) in nv50_display_create()
2753 crtcs = nvif_rd32(&device->object, 0x612004) & 0xf; in nv50_display_create()
2770 head->msto = nv50_msto_new(dev, head, i); in nv50_display_create()
2771 if (IS_ERR(head->msto)) { in nv50_display_create()
2772 ret = PTR_ERR(head->msto); in nv50_display_create()
2773 head->msto = NULL; in nv50_display_create()
2787 head->msto->encoder.possible_crtcs = crtcs; in nv50_display_create()
2792 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) { in nv50_display_create()
2797 if (dcbe->location == DCB_LOC_ON_CHIP) { in nv50_display_create()
2798 switch (dcbe->type) { in nv50_display_create()
2808 ret = -ENODEV; in nv50_display_create()
2817 dcbe->location, dcbe->type, in nv50_display_create()
2818 ffs(dcbe->or) - 1, ret); in nv50_display_create()
2824 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) { in nv50_display_create()
2825 if (connector->possible_encoders) in nv50_display_create()
2829 connector->name); in nv50_display_create()
2830 connector->funcs->destroy(connector); in nv50_display_create()
2833 /* Disable vblank irqs aggressively for power-saving, safe on nv50+ */ in nv50_display_create()
2834 dev->vblank_disable_immediate = true; in nv50_display_create()
2849 * Log2(block height) ----------------------------+ *
2850 * Page Kind ----------------------------------+ | *
2851 * Gob Height/Page Kind Generation ------+ | | *
2852 * Sector layout -------+ | | | *
2853 * Compression ------+ | | | | */
2878 * Log2(block height) ----------------------------+ *
2879 * Page Kind ----------------------------------+ | *
2880 * Gob Height/Page Kind Generation ------+ | | *
2881 * Sector layout -------+ | | | *
2882 * Compression ------+ | | | | */