Lines matching refs: dev_priv — one entry per matching source line, annotated with the enclosing function and, where dev_priv is declared there, whether it is an argument or a local.
94 static bool intel_psr2_enabled(struct drm_i915_private *dev_priv, in intel_psr2_enabled() argument
98 drm_WARN_ON(&dev_priv->drm, crtc_state->dsc.compression_enable && in intel_psr2_enabled()
101 switch (dev_priv->psr.debug & I915_PSR_DEBUG_MODE_MASK) { in intel_psr2_enabled()
110 static void psr_irq_control(struct drm_i915_private *dev_priv) in psr_irq_control() argument
121 if (INTEL_GEN(dev_priv) >= 12) { in psr_irq_control()
123 imr_reg = TRANS_PSR_IMR(dev_priv->psr.transcoder); in psr_irq_control()
125 trans_shift = dev_priv->psr.transcoder; in psr_irq_control()
130 if (dev_priv->psr.debug & I915_PSR_DEBUG_IRQ) in psr_irq_control()
135 val = intel_de_read(dev_priv, imr_reg); in psr_irq_control()
138 intel_de_write(dev_priv, imr_reg, val); in psr_irq_control()
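The psr_irq_control() matches above show the per-generation split: gen12+ hardware has one TRANS_PSR_IMR per transcoder, while older hardware funnels everything through a single EDP_PSR_IMR and shifts the EDP_PSR_* bit definitions by the transcoder. A minimal standalone sketch of that selection follows; the enum and struct are stand-ins of mine, not driver types, and the trans_shift = 0 assignment in the gen12 branch is taken from the full source rather than from the lines listed here.

enum psr_imr_reg { REG_EDP_PSR_IMR, REG_TRANS_PSR_IMR };

struct imr_choice {
	enum psr_imr_reg reg;
	int instance;		/* which transcoder's TRANS_PSR_IMR, when per-transcoder */
	int trans_shift;	/* shift applied to the EDP_PSR_* bit definitions */
};

static struct imr_choice pick_psr_imr(int gen, int transcoder)
{
	struct imr_choice c;

	if (gen >= 12) {
		c.reg = REG_TRANS_PSR_IMR;	/* one register per transcoder */
		c.instance = transcoder;
		c.trans_shift = 0;		/* assumed from the full source, not listed above */
	} else {
		c.reg = REG_EDP_PSR_IMR;	/* single shared register */
		c.instance = 0;
		c.trans_shift = transcoder;	/* bits shifted per transcoder (line 125) */
	}
	return c;
}

The same register/shift choice is repeated at the top of intel_psr_irq_handler() below (lines 186-190).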
179 void intel_psr_irq_handler(struct drm_i915_private *dev_priv, u32 psr_iir) in intel_psr_irq_handler() argument
181 enum transcoder cpu_transcoder = dev_priv->psr.transcoder; in intel_psr_irq_handler()
186 if (INTEL_GEN(dev_priv) >= 12) { in intel_psr_irq_handler()
188 imr_reg = TRANS_PSR_IMR(dev_priv->psr.transcoder); in intel_psr_irq_handler()
190 trans_shift = dev_priv->psr.transcoder; in intel_psr_irq_handler()
195 dev_priv->psr.last_entry_attempt = time_ns; in intel_psr_irq_handler()
196 drm_dbg_kms(&dev_priv->drm, in intel_psr_irq_handler()
202 dev_priv->psr.last_exit = time_ns; in intel_psr_irq_handler()
203 drm_dbg_kms(&dev_priv->drm, in intel_psr_irq_handler()
207 if (INTEL_GEN(dev_priv) >= 9) { in intel_psr_irq_handler()
208 u32 val = intel_de_read(dev_priv, in intel_psr_irq_handler()
210 bool psr2_enabled = dev_priv->psr.psr2_enabled; in intel_psr_irq_handler()
212 intel_de_write(dev_priv, PSR_EVENT(cpu_transcoder), in intel_psr_irq_handler()
214 psr_event_print(dev_priv, val, psr2_enabled); in intel_psr_irq_handler()
221 drm_warn(&dev_priv->drm, "[transcoder %s] PSR aux error\n", in intel_psr_irq_handler()
224 dev_priv->psr.irq_aux_error = true; in intel_psr_irq_handler()
234 val = intel_de_read(dev_priv, imr_reg); in intel_psr_irq_handler()
236 intel_de_write(dev_priv, imr_reg, val); in intel_psr_irq_handler()
238 schedule_work(&dev_priv->psr.work); in intel_psr_irq_handler()
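The tail of intel_psr_irq_handler() above (lines 221-238) handles a PSR AUX error by flagging it, masking the interrupt, and deferring recovery to psr.work rather than doing it in IRQ context. A rough sketch of that shape, with stub state in place of the real IMR register and workqueue; the OR of the error bit between the IMR read and write is not visible in this listing and is assumed from the full source:

#include <stdbool.h>

struct psr_err_sketch {
	bool irq_aux_error;
	unsigned int imr;	/* stand-in for the PSR interrupt mask register */
	bool work_scheduled;	/* stand-in for schedule_work(&psr.work) */
};

#define PSR_ERROR_BIT	(1u << 2)	/* illustrative, not the real EDP_PSR_ERROR() bit */

static void handle_psr_error(struct psr_err_sketch *psr)
{
	psr->irq_aux_error = true;	/* remembered for the worker (line 224) */
	psr->imr |= PSR_ERROR_BIT;	/* mask the error so it cannot storm */
	psr->work_scheduled = true;	/* recovery runs later from psr.work (line 238) */
}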
296 struct drm_i915_private *dev_priv = in intel_psr_init_dpcd() local
299 if (dev_priv->psr.dp) { in intel_psr_init_dpcd()
300 drm_warn(&dev_priv->drm, in intel_psr_init_dpcd()
310 drm_dbg_kms(&dev_priv->drm, "eDP panel supports PSR version %x\n", in intel_psr_init_dpcd()
314 drm_dbg_kms(&dev_priv->drm, in intel_psr_init_dpcd()
320 drm_dbg_kms(&dev_priv->drm, in intel_psr_init_dpcd()
325 dev_priv->psr.sink_support = true; in intel_psr_init_dpcd()
326 dev_priv->psr.sink_sync_latency = in intel_psr_init_dpcd()
329 dev_priv->psr.dp = intel_dp; in intel_psr_init_dpcd()
331 if (INTEL_GEN(dev_priv) >= 9 && in intel_psr_init_dpcd()
348 dev_priv->psr.sink_psr2_support = y_req && alpm; in intel_psr_init_dpcd()
349 drm_dbg_kms(&dev_priv->drm, "PSR2 %ssupported\n", in intel_psr_init_dpcd()
350 dev_priv->psr.sink_psr2_support ? "" : "not "); in intel_psr_init_dpcd()
352 if (dev_priv->psr.sink_psr2_support) { in intel_psr_init_dpcd()
353 dev_priv->psr.colorimetry_support = in intel_psr_init_dpcd()
355 dev_priv->psr.su_x_granularity = in intel_psr_init_dpcd()
363 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in hsw_psr_setup_aux() local
380 intel_de_write(dev_priv, in hsw_psr_setup_aux()
381 EDP_PSR_AUX_DATA(dev_priv->psr.transcoder, i >> 2), in hsw_psr_setup_aux()
392 intel_de_write(dev_priv, EDP_PSR_AUX_CTL(dev_priv->psr.transcoder), in hsw_psr_setup_aux()
398 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_enable_sink() local
402 if (dev_priv->psr.psr2_enabled) { in intel_psr_enable_sink()
409 if (dev_priv->psr.link_standby) in intel_psr_enable_sink()
412 if (INTEL_GEN(dev_priv) >= 8) in intel_psr_enable_sink()
423 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr1_get_tp_time() local
426 if (INTEL_GEN(dev_priv) >= 11) in intel_psr1_get_tp_time()
429 if (dev_priv->params.psr_safest_params) { in intel_psr1_get_tp_time()
435 if (dev_priv->vbt.psr.tp1_wakeup_time_us == 0) in intel_psr1_get_tp_time()
437 else if (dev_priv->vbt.psr.tp1_wakeup_time_us <= 100) in intel_psr1_get_tp_time()
439 else if (dev_priv->vbt.psr.tp1_wakeup_time_us <= 500) in intel_psr1_get_tp_time()
444 if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us == 0) in intel_psr1_get_tp_time()
446 else if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 100) in intel_psr1_get_tp_time()
448 else if (dev_priv->vbt.psr.tp2_tp3_wakeup_time_us <= 500) in intel_psr1_get_tp_time()
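intel_psr1_get_tp_time() above quantizes the VBT TP1 and TP2/TP3 wakeup times into the coarse values the EDP_PSR_CTL field can express; 0, 100 and 500 microseconds are the visible thresholds. A standalone sketch of that bucketing — the 2500us fallback for larger values is my assumption for the else branch that the listing truncates:

enum tp_bucket { TP_TIME_0US, TP_TIME_100US, TP_TIME_500US, TP_TIME_2500US };

static enum tp_bucket tp_wakeup_bucket(unsigned int wakeup_time_us)
{
	if (wakeup_time_us == 0)
		return TP_TIME_0US;
	else if (wakeup_time_us <= 100)
		return TP_TIME_100US;
	else if (wakeup_time_us <= 500)
		return TP_TIME_500US;
	else
		return TP_TIME_2500US;	/* assumed fallback, not visible above */
}

A VBT value of 250us, for instance, lands in the 500us bucket. The PSR2 variant at lines 507-518 below applies the same idea to psr2_tp2_tp3_wakeup_time_us, with an extra 50us bucket at the low end.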
465 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in psr_compute_idle_frames() local
471 idle_frames = max(6, dev_priv->vbt.psr.idle_frames); in psr_compute_idle_frames()
472 idle_frames = max(idle_frames, dev_priv->psr.sink_sync_latency + 1); in psr_compute_idle_frames()
474 if (drm_WARN_ON(&dev_priv->drm, idle_frames > 0xf)) in psr_compute_idle_frames()
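psr_compute_idle_frames() above clamps the number of idle frames required before PSR entry: at least 6, at least sink_sync_latency + 1, and never more than the 4-bit field can hold (the drm_WARN_ON at line 474 fires above 0xf). A standalone sketch; clamping to 0xf after the WARN is my reading of the full function, which this listing truncates:

static unsigned int compute_idle_frames(unsigned int vbt_idle_frames,
					unsigned int sink_sync_latency)
{
	unsigned int idle_frames = vbt_idle_frames > 6 ? vbt_idle_frames : 6;

	if (idle_frames < sink_sync_latency + 1)
		idle_frames = sink_sync_latency + 1;
	if (idle_frames > 0xf)		/* the drm_WARN_ON case at line 474 */
		idle_frames = 0xf;	/* assumed clamp; not visible in the listing */
	return idle_frames;
}

For example, compute_idle_frames(0, 8) returns 9.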
482 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in hsw_activate_psr1() local
489 if (IS_HASWELL(dev_priv)) in hsw_activate_psr1()
492 if (dev_priv->psr.link_standby) in hsw_activate_psr1()
497 if (INTEL_GEN(dev_priv) >= 8) in hsw_activate_psr1()
500 val |= (intel_de_read(dev_priv, EDP_PSR_CTL(dev_priv->psr.transcoder)) & in hsw_activate_psr1()
502 intel_de_write(dev_priv, EDP_PSR_CTL(dev_priv->psr.transcoder), val); in hsw_activate_psr1()
507 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr2_get_tp_time() local
510 if (dev_priv->params.psr_safest_params) in intel_psr2_get_tp_time()
513 if (dev_priv->vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 && in intel_psr2_get_tp_time()
514 dev_priv->vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50) in intel_psr2_get_tp_time()
516 else if (dev_priv->vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100) in intel_psr2_get_tp_time()
518 else if (dev_priv->vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500) in intel_psr2_get_tp_time()
528 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in hsw_activate_psr2() local
534 if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) in hsw_activate_psr2()
537 val |= EDP_PSR2_FRAME_BEFORE_SU(dev_priv->psr.sink_sync_latency + 1); in hsw_activate_psr2()
540 if (INTEL_GEN(dev_priv) >= 12) { in hsw_activate_psr2()
551 } else if (INTEL_GEN(dev_priv) >= 9) { in hsw_activate_psr2()
556 if (dev_priv->psr.psr2_sel_fetch_enabled) { in hsw_activate_psr2()
558 if (IS_TGL_DISP_REVID(dev_priv, TGL_REVID_A0, TGL_REVID_A0) || in hsw_activate_psr2()
559 IS_RKL_REVID(dev_priv, RKL_REVID_A0, RKL_REVID_A0)) in hsw_activate_psr2()
560 intel_de_rmw(dev_priv, CHICKEN_PAR1_1, in hsw_activate_psr2()
564 intel_de_write(dev_priv, in hsw_activate_psr2()
565 PSR2_MAN_TRK_CTL(dev_priv->psr.transcoder), in hsw_activate_psr2()
567 } else if (HAS_PSR2_SEL_FETCH(dev_priv)) { in hsw_activate_psr2()
568 intel_de_write(dev_priv, in hsw_activate_psr2()
569 PSR2_MAN_TRK_CTL(dev_priv->psr.transcoder), 0); in hsw_activate_psr2()
576 intel_de_write(dev_priv, EDP_PSR_CTL(dev_priv->psr.transcoder), 0); in hsw_activate_psr2()
578 intel_de_write(dev_priv, EDP_PSR2_CTL(dev_priv->psr.transcoder), val); in hsw_activate_psr2()
582 transcoder_has_psr2(struct drm_i915_private *dev_priv, enum transcoder trans) in transcoder_has_psr2() argument
584 if (INTEL_GEN(dev_priv) < 9) in transcoder_has_psr2()
586 else if (INTEL_GEN(dev_priv) >= 12) in transcoder_has_psr2()
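Only the generation checks of transcoder_has_psr2() survive in this listing. As best I recall the full function, PSR2 is absent before gen9, limited to the eDP transcoder on gen9-11, and limited to transcoder A on gen12+; treat the return values in this sketch as a recollection, not something shown above:

#include <stdbool.h>

enum transcoder { TRANSCODER_A, TRANSCODER_B, TRANSCODER_C, TRANSCODER_EDP };

static bool transcoder_has_psr2_sketch(int gen, enum transcoder trans)
{
	if (gen < 9)
		return false;			/* no PSR2 before gen9 */
	else if (gen >= 12)
		return trans == TRANSCODER_A;	/* gen12+: transcoder A only (recollection) */
	else
		return trans == TRANSCODER_EDP;	/* gen9-11: eDP transcoder only (recollection) */
}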
601 static void psr2_program_idle_frames(struct drm_i915_private *dev_priv, in psr2_program_idle_frames() argument
607 val = intel_de_read(dev_priv, EDP_PSR2_CTL(dev_priv->psr.transcoder)); in psr2_program_idle_frames()
610 intel_de_write(dev_priv, EDP_PSR2_CTL(dev_priv->psr.transcoder), val); in psr2_program_idle_frames()
613 static void tgl_psr2_enable_dc3co(struct drm_i915_private *dev_priv) in tgl_psr2_enable_dc3co() argument
615 psr2_program_idle_frames(dev_priv, 0); in tgl_psr2_enable_dc3co()
616 intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_DC3CO); in tgl_psr2_enable_dc3co()
619 static void tgl_psr2_disable_dc3co(struct drm_i915_private *dev_priv) in tgl_psr2_disable_dc3co() argument
621 struct intel_dp *intel_dp = dev_priv->psr.dp; in tgl_psr2_disable_dc3co()
623 intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6); in tgl_psr2_disable_dc3co()
624 psr2_program_idle_frames(dev_priv, psr_compute_idle_frames(intel_dp)); in tgl_psr2_disable_dc3co()
629 struct drm_i915_private *dev_priv = in tgl_dc3co_disable_work() local
630 container_of(work, typeof(*dev_priv), psr.dc3co_work.work); in tgl_dc3co_disable_work()
632 mutex_lock(&dev_priv->psr.lock); in tgl_dc3co_disable_work()
634 if (delayed_work_pending(&dev_priv->psr.dc3co_work)) in tgl_dc3co_disable_work()
637 tgl_psr2_disable_dc3co(dev_priv); in tgl_dc3co_disable_work()
639 mutex_unlock(&dev_priv->psr.lock); in tgl_dc3co_disable_work()
642 static void tgl_disallow_dc3co_on_psr2_exit(struct drm_i915_private *dev_priv) in tgl_disallow_dc3co_on_psr2_exit() argument
644 if (!dev_priv->psr.dc3co_enabled) in tgl_disallow_dc3co_on_psr2_exit()
647 cancel_delayed_work(&dev_priv->psr.dc3co_work); in tgl_disallow_dc3co_on_psr2_exit()
649 tgl_psr2_disable_dc3co(dev_priv); in tgl_disallow_dc3co_on_psr2_exit()
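Lines 613-649 show the DC3CO handling around PSR2: enabling DC3CO programs zero idle frames and raises the target DC state, disabling it restores DC6 and the computed idle-frame count, and the delayed dc3co_work (plus cancel_delayed_work on PSR2 exit) decides when the disable actually runs. A stub sketch of the enable/disable pairing; the enum labels are illustrative, not the driver's DC_STATE_* values:

enum dc_state_sketch { DC_UPTO_DC6, DC_DC3CO };	/* illustrative labels only */

struct dc3co_sketch {
	enum dc_state_sketch target_dc_state;
	unsigned int psr2_idle_frames;
};

static void dc3co_enable(struct dc3co_sketch *s)
{
	s->psr2_idle_frames = 0;		/* psr2_program_idle_frames(dev_priv, 0) */
	s->target_dc_state = DC_DC3CO;		/* allow DC3CO while PSR2 is idle */
}

static void dc3co_disable(struct dc3co_sketch *s, unsigned int computed_idle_frames)
{
	s->target_dc_state = DC_UPTO_DC6;	/* back to the default DC target */
	s->psr2_idle_frames = computed_idle_frames;	/* restore the psr_compute_idle_frames() value */
}

tgl_dc3co_flush() further down (lines 1493-1517) re-arms dc3co_work with mod_delayed_work() and dc3co_exit_delay on frontbuffer flushes.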
658 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in tgl_dc3co_exitline_compute_config() local
661 if (!(dev_priv->csr.allowed_dc_mask & DC_STATE_EN_DC3CO)) in tgl_dc3co_exitline_compute_config()
676 if (drm_WARN_ON(&dev_priv->drm, exit_scanlines > crtc_vdisplay)) in tgl_dc3co_exitline_compute_config()
686 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr2_sel_fetch_config_valid() local
691 if (!dev_priv->params.enable_psr2_sel_fetch) { in intel_psr2_sel_fetch_config_valid()
692 drm_dbg_kms(&dev_priv->drm, in intel_psr2_sel_fetch_config_valid()
698 drm_dbg_kms(&dev_priv->drm, in intel_psr2_sel_fetch_config_valid()
705 drm_dbg_kms(&dev_priv->drm, in intel_psr2_sel_fetch_config_valid()
717 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr2_config_valid() local
722 if (!dev_priv->psr.sink_psr2_support) in intel_psr2_config_valid()
725 if (!transcoder_has_psr2(dev_priv, crtc_state->cpu_transcoder)) { in intel_psr2_config_valid()
726 drm_dbg_kms(&dev_priv->drm, in intel_psr2_config_valid()
738 drm_dbg_kms(&dev_priv->drm, in intel_psr2_config_valid()
744 drm_dbg_kms(&dev_priv->drm, in intel_psr2_config_valid()
749 if (INTEL_GEN(dev_priv) >= 12) { in intel_psr2_config_valid()
753 } else if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) { in intel_psr2_config_valid()
757 } else if (IS_GEN(dev_priv, 9)) { in intel_psr2_config_valid()
764 drm_dbg_kms(&dev_priv->drm, in intel_psr2_config_valid()
776 if (crtc_hdisplay % dev_priv->psr.su_x_granularity) { in intel_psr2_config_valid()
777 drm_dbg_kms(&dev_priv->drm, in intel_psr2_config_valid()
779 crtc_hdisplay, dev_priv->psr.su_x_granularity); in intel_psr2_config_valid()
783 if (HAS_PSR2_SEL_FETCH(dev_priv)) { in intel_psr2_config_valid()
785 !HAS_PSR_HW_TRACKING(dev_priv)) { in intel_psr2_config_valid()
786 drm_dbg_kms(&dev_priv->drm, in intel_psr2_config_valid()
794 drm_dbg_kms(&dev_priv->drm, in intel_psr2_config_valid()
809 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_compute_config() local
814 if (!CAN_PSR(dev_priv)) in intel_psr_compute_config()
817 if (intel_dp != dev_priv->psr.dp) in intel_psr_compute_config()
820 if (!psr_global_enabled(dev_priv)) in intel_psr_compute_config()
829 drm_dbg_kms(&dev_priv->drm, in intel_psr_compute_config()
834 if (dev_priv->psr.sink_not_reliable) { in intel_psr_compute_config()
835 drm_dbg_kms(&dev_priv->drm, in intel_psr_compute_config()
841 drm_dbg_kms(&dev_priv->drm, in intel_psr_compute_config()
848 drm_dbg_kms(&dev_priv->drm, in intel_psr_compute_config()
856 drm_dbg_kms(&dev_priv->drm, in intel_psr_compute_config()
869 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_activate() local
871 if (transcoder_has_psr2(dev_priv, dev_priv->psr.transcoder)) in intel_psr_activate()
872 drm_WARN_ON(&dev_priv->drm, in intel_psr_activate()
873 intel_de_read(dev_priv, EDP_PSR2_CTL(dev_priv->psr.transcoder)) & EDP_PSR2_ENABLE); in intel_psr_activate()
875 drm_WARN_ON(&dev_priv->drm, in intel_psr_activate()
876 intel_de_read(dev_priv, EDP_PSR_CTL(dev_priv->psr.transcoder)) & EDP_PSR_ENABLE); in intel_psr_activate()
877 drm_WARN_ON(&dev_priv->drm, dev_priv->psr.active); in intel_psr_activate()
878 lockdep_assert_held(&dev_priv->psr.lock); in intel_psr_activate()
881 if (dev_priv->psr.psr2_enabled) in intel_psr_activate()
886 dev_priv->psr.active = true; in intel_psr_activate()
892 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_enable_source() local
899 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) in intel_psr_enable_source()
902 if (dev_priv->psr.psr2_enabled && (IS_GEN(dev_priv, 9) && in intel_psr_enable_source()
903 !IS_GEMINILAKE(dev_priv))) { in intel_psr_enable_source()
905 u32 chicken = intel_de_read(dev_priv, reg); in intel_psr_enable_source()
909 intel_de_write(dev_priv, reg, chicken); in intel_psr_enable_source()
923 if (INTEL_GEN(dev_priv) < 11) in intel_psr_enable_source()
926 intel_de_write(dev_priv, EDP_PSR_DEBUG(dev_priv->psr.transcoder), in intel_psr_enable_source()
929 psr_irq_control(dev_priv); in intel_psr_enable_source()
938 val = intel_de_read(dev_priv, EXITLINE(cpu_transcoder)); in intel_psr_enable_source()
942 intel_de_write(dev_priv, EXITLINE(cpu_transcoder), val); in intel_psr_enable_source()
945 if (HAS_PSR_HW_TRACKING(dev_priv)) in intel_psr_enable_source()
946 intel_de_rmw(dev_priv, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING, in intel_psr_enable_source()
947 dev_priv->psr.psr2_sel_fetch_enabled ? in intel_psr_enable_source()
951 static void intel_psr_enable_locked(struct drm_i915_private *dev_priv, in intel_psr_enable_locked() argument
955 struct intel_dp *intel_dp = dev_priv->psr.dp; in intel_psr_enable_locked()
960 drm_WARN_ON(&dev_priv->drm, dev_priv->psr.enabled); in intel_psr_enable_locked()
962 dev_priv->psr.psr2_enabled = intel_psr2_enabled(dev_priv, crtc_state); in intel_psr_enable_locked()
963 dev_priv->psr.busy_frontbuffer_bits = 0; in intel_psr_enable_locked()
964 dev_priv->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe; in intel_psr_enable_locked()
965 dev_priv->psr.dc3co_enabled = !!crtc_state->dc3co_exitline; in intel_psr_enable_locked()
966 dev_priv->psr.transcoder = crtc_state->cpu_transcoder; in intel_psr_enable_locked()
969 dev_priv->psr.dc3co_exit_delay = val; in intel_psr_enable_locked()
970 dev_priv->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch; in intel_psr_enable_locked()
980 if (INTEL_GEN(dev_priv) >= 12) { in intel_psr_enable_locked()
981 val = intel_de_read(dev_priv, in intel_psr_enable_locked()
982 TRANS_PSR_IIR(dev_priv->psr.transcoder)); in intel_psr_enable_locked()
985 val = intel_de_read(dev_priv, EDP_PSR_IIR); in intel_psr_enable_locked()
986 val &= EDP_PSR_ERROR(dev_priv->psr.transcoder); in intel_psr_enable_locked()
989 dev_priv->psr.sink_not_reliable = true; in intel_psr_enable_locked()
990 drm_dbg_kms(&dev_priv->drm, in intel_psr_enable_locked()
995 drm_dbg_kms(&dev_priv->drm, "Enabling PSR%s\n", in intel_psr_enable_locked()
996 dev_priv->psr.psr2_enabled ? "2" : "1"); in intel_psr_enable_locked()
998 &dev_priv->psr.vsc); in intel_psr_enable_locked()
999 intel_write_dp_vsc_sdp(encoder, crtc_state, &dev_priv->psr.vsc); in intel_psr_enable_locked()
1002 dev_priv->psr.enabled = true; in intel_psr_enable_locked()
1019 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_enable() local
1021 if (!CAN_PSR(dev_priv) || dev_priv->psr.dp != intel_dp) in intel_psr_enable()
1024 dev_priv->psr.force_mode_changed = false; in intel_psr_enable()
1029 drm_WARN_ON(&dev_priv->drm, dev_priv->drrs.dp); in intel_psr_enable()
1031 mutex_lock(&dev_priv->psr.lock); in intel_psr_enable()
1033 if (!psr_global_enabled(dev_priv)) { in intel_psr_enable()
1034 drm_dbg_kms(&dev_priv->drm, "PSR disabled by flag\n"); in intel_psr_enable()
1038 intel_psr_enable_locked(dev_priv, crtc_state, conn_state); in intel_psr_enable()
1041 mutex_unlock(&dev_priv->psr.lock); in intel_psr_enable()
1044 static void intel_psr_exit(struct drm_i915_private *dev_priv) in intel_psr_exit() argument
1048 if (!dev_priv->psr.active) { in intel_psr_exit()
1049 if (transcoder_has_psr2(dev_priv, dev_priv->psr.transcoder)) { in intel_psr_exit()
1050 val = intel_de_read(dev_priv, in intel_psr_exit()
1051 EDP_PSR2_CTL(dev_priv->psr.transcoder)); in intel_psr_exit()
1052 drm_WARN_ON(&dev_priv->drm, val & EDP_PSR2_ENABLE); in intel_psr_exit()
1055 val = intel_de_read(dev_priv, in intel_psr_exit()
1056 EDP_PSR_CTL(dev_priv->psr.transcoder)); in intel_psr_exit()
1057 drm_WARN_ON(&dev_priv->drm, val & EDP_PSR_ENABLE); in intel_psr_exit()
1062 if (dev_priv->psr.psr2_enabled) { in intel_psr_exit()
1063 tgl_disallow_dc3co_on_psr2_exit(dev_priv); in intel_psr_exit()
1064 val = intel_de_read(dev_priv, in intel_psr_exit()
1065 EDP_PSR2_CTL(dev_priv->psr.transcoder)); in intel_psr_exit()
1066 drm_WARN_ON(&dev_priv->drm, !(val & EDP_PSR2_ENABLE)); in intel_psr_exit()
1068 intel_de_write(dev_priv, in intel_psr_exit()
1069 EDP_PSR2_CTL(dev_priv->psr.transcoder), val); in intel_psr_exit()
1071 val = intel_de_read(dev_priv, in intel_psr_exit()
1072 EDP_PSR_CTL(dev_priv->psr.transcoder)); in intel_psr_exit()
1073 drm_WARN_ON(&dev_priv->drm, !(val & EDP_PSR_ENABLE)); in intel_psr_exit()
1075 intel_de_write(dev_priv, in intel_psr_exit()
1076 EDP_PSR_CTL(dev_priv->psr.transcoder), val); in intel_psr_exit()
1078 dev_priv->psr.active = false; in intel_psr_exit()
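intel_psr_exit() above reads back EDP_PSR2_CTL or EDP_PSR_CTL, warns if the enable bit is not in the expected state, writes the register back, and clears psr.active. The actual clearing of the enable bit happens on a line that does not reference dev_priv and so is missing from this listing; the sketch below assumes it, and the bit position is illustrative only:

#include <assert.h>

#define PSR_CTL_ENABLE	(1u << 31)	/* illustrative bit, not taken from the listing */

static unsigned int psr_ctl_disable(unsigned int ctl)
{
	/* stands in for the drm_WARN_ON(!(val & EDP_PSR(2)_ENABLE)) checks above */
	assert(ctl & PSR_CTL_ENABLE);
	return ctl & ~PSR_CTL_ENABLE;	/* value written back to EDP_PSR(2)_CTL */
}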
1083 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_disable_locked() local
1087 lockdep_assert_held(&dev_priv->psr.lock); in intel_psr_disable_locked()
1089 if (!dev_priv->psr.enabled) in intel_psr_disable_locked()
1092 drm_dbg_kms(&dev_priv->drm, "Disabling PSR%s\n", in intel_psr_disable_locked()
1093 dev_priv->psr.psr2_enabled ? "2" : "1"); in intel_psr_disable_locked()
1095 intel_psr_exit(dev_priv); in intel_psr_disable_locked()
1097 if (dev_priv->psr.psr2_enabled) { in intel_psr_disable_locked()
1098 psr_status = EDP_PSR2_STATUS(dev_priv->psr.transcoder); in intel_psr_disable_locked()
1101 psr_status = EDP_PSR_STATUS(dev_priv->psr.transcoder); in intel_psr_disable_locked()
1106 if (intel_de_wait_for_clear(dev_priv, psr_status, in intel_psr_disable_locked()
1108 drm_err(&dev_priv->drm, "Timed out waiting PSR idle state\n"); in intel_psr_disable_locked()
1111 if (dev_priv->psr.psr2_sel_fetch_enabled && in intel_psr_disable_locked()
1112 (IS_TGL_DISP_REVID(dev_priv, TGL_REVID_A0, TGL_REVID_A0) || in intel_psr_disable_locked()
1113 IS_RKL_REVID(dev_priv, RKL_REVID_A0, RKL_REVID_A0))) in intel_psr_disable_locked()
1114 intel_de_rmw(dev_priv, CHICKEN_PAR1_1, in intel_psr_disable_locked()
1120 if (dev_priv->psr.psr2_enabled) in intel_psr_disable_locked()
1123 dev_priv->psr.enabled = false; in intel_psr_disable_locked()
1136 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_disable() local
1141 if (drm_WARN_ON(&dev_priv->drm, !CAN_PSR(dev_priv))) in intel_psr_disable()
1144 mutex_lock(&dev_priv->psr.lock); in intel_psr_disable()
1148 mutex_unlock(&dev_priv->psr.lock); in intel_psr_disable()
1149 cancel_work_sync(&dev_priv->psr.work); in intel_psr_disable()
1150 cancel_delayed_work_sync(&dev_priv->psr.dc3co_work); in intel_psr_disable()
1153 static void psr_force_hw_tracking_exit(struct drm_i915_private *dev_priv) in psr_force_hw_tracking_exit() argument
1155 if (INTEL_GEN(dev_priv) >= 9) in psr_force_hw_tracking_exit()
1165 intel_de_write(dev_priv, CURSURFLIVE(dev_priv->psr.pipe), 0); in psr_force_hw_tracking_exit()
1171 intel_psr_exit(dev_priv); in psr_force_hw_tracking_exit()
1177 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); in intel_psr2_program_trans_man_trk_ctl() local
1178 struct i915_psr *psr = &dev_priv->psr; in intel_psr2_program_trans_man_trk_ctl()
1180 if (!HAS_PSR2_SEL_FETCH(dev_priv) || in intel_psr2_program_trans_man_trk_ctl()
1184 intel_de_write(dev_priv, PSR2_MAN_TRK_CTL(psr->transcoder), in intel_psr2_program_trans_man_trk_ctl()
1214 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_update() local
1215 struct i915_psr *psr = &dev_priv->psr; in intel_psr_update()
1218 if (!CAN_PSR(dev_priv) || READ_ONCE(psr->dp) != intel_dp) in intel_psr_update()
1221 dev_priv->psr.force_mode_changed = false; in intel_psr_update()
1223 mutex_lock(&dev_priv->psr.lock); in intel_psr_update()
1225 enable = crtc_state->has_psr && psr_global_enabled(dev_priv); in intel_psr_update()
1226 psr2_enable = intel_psr2_enabled(dev_priv, crtc_state); in intel_psr_update()
1231 psr_force_hw_tracking_exit(dev_priv); in intel_psr_update()
1232 else if (INTEL_GEN(dev_priv) < 9 && psr->enabled) { in intel_psr_update()
1237 if (!dev_priv->psr.active && in intel_psr_update()
1238 !dev_priv->psr.busy_frontbuffer_bits) in intel_psr_update()
1239 schedule_work(&dev_priv->psr.work); in intel_psr_update()
1249 intel_psr_enable_locked(dev_priv, crtc_state, conn_state); in intel_psr_update()
1252 mutex_unlock(&dev_priv->psr.lock); in intel_psr_update()
1269 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); in intel_psr_wait_for_idle() local
1271 if (!dev_priv->psr.enabled || !new_crtc_state->has_psr) in intel_psr_wait_for_idle()
1275 if (READ_ONCE(dev_priv->psr.psr2_enabled)) in intel_psr_wait_for_idle()
1285 return __intel_wait_for_register(&dev_priv->uncore, in intel_psr_wait_for_idle()
1286 EDP_PSR_STATUS(dev_priv->psr.transcoder), in intel_psr_wait_for_idle()
1292 static bool __psr_wait_for_idle_locked(struct drm_i915_private *dev_priv) in __psr_wait_for_idle_locked() argument
1298 if (!dev_priv->psr.enabled) in __psr_wait_for_idle_locked()
1301 if (dev_priv->psr.psr2_enabled) { in __psr_wait_for_idle_locked()
1302 reg = EDP_PSR2_STATUS(dev_priv->psr.transcoder); in __psr_wait_for_idle_locked()
1305 reg = EDP_PSR_STATUS(dev_priv->psr.transcoder); in __psr_wait_for_idle_locked()
1309 mutex_unlock(&dev_priv->psr.lock); in __psr_wait_for_idle_locked()
1311 err = intel_de_wait_for_clear(dev_priv, reg, mask, 50); in __psr_wait_for_idle_locked()
1313 drm_err(&dev_priv->drm, in __psr_wait_for_idle_locked()
1317 mutex_lock(&dev_priv->psr.lock); in __psr_wait_for_idle_locked()
1318 return err == 0 && dev_priv->psr.enabled; in __psr_wait_for_idle_locked()
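__psr_wait_for_idle_locked() above is a drop-the-lock-around-a-blocking-wait helper: it releases psr.lock before polling the status register (the 50 at line 1311 is the timeout, in milliseconds as I read it), re-takes the lock, and reports success only if PSR is still enabled afterwards. A compilable sketch of that shape, with a pthread mutex and a stub poll standing in for psr.lock and intel_de_wait_for_clear():

#include <pthread.h>
#include <stdbool.h>

struct psr_wait_sketch {
	pthread_mutex_t lock;	/* stand-in for dev_priv->psr.lock */
	bool enabled;
};

static int wait_for_status_clear(void)	/* stand-in for intel_de_wait_for_clear() */
{
	return 0;			/* pretend the busy bit cleared in time */
}

/* caller holds psr->lock, exactly like __psr_wait_for_idle_locked() */
static bool wait_for_idle_locked(struct psr_wait_sketch *psr)
{
	int err;

	if (!psr->enabled)
		return false;

	pthread_mutex_unlock(&psr->lock);	/* never poll hardware under the lock */
	err = wait_for_status_clear();		/* bounded wait in the driver */
	pthread_mutex_lock(&psr->lock);

	/* PSR may have been torn down while the lock was dropped */
	return err == 0 && psr->enabled;
}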
1321 static int intel_psr_fastset_force(struct drm_i915_private *dev_priv) in intel_psr_fastset_force() argument
1323 struct drm_device *dev = &dev_priv->drm; in intel_psr_fastset_force()
1370 int intel_psr_debug_set(struct drm_i915_private *dev_priv, u64 val) in intel_psr_debug_set() argument
1378 drm_dbg_kms(&dev_priv->drm, "Invalid debug mask %llx\n", val); in intel_psr_debug_set()
1382 ret = mutex_lock_interruptible(&dev_priv->psr.lock); in intel_psr_debug_set()
1386 old_mode = dev_priv->psr.debug & I915_PSR_DEBUG_MODE_MASK; in intel_psr_debug_set()
1387 dev_priv->psr.debug = val; in intel_psr_debug_set()
1393 if (dev_priv->psr.enabled) in intel_psr_debug_set()
1394 psr_irq_control(dev_priv); in intel_psr_debug_set()
1396 mutex_unlock(&dev_priv->psr.lock); in intel_psr_debug_set()
1399 ret = intel_psr_fastset_force(dev_priv); in intel_psr_debug_set()
1404 static void intel_psr_handle_irq(struct drm_i915_private *dev_priv) in intel_psr_handle_irq() argument
1406 struct i915_psr *psr = &dev_priv->psr; in intel_psr_handle_irq()
1416 struct drm_i915_private *dev_priv = in intel_psr_work() local
1417 container_of(work, typeof(*dev_priv), psr.work); in intel_psr_work()
1419 mutex_lock(&dev_priv->psr.lock); in intel_psr_work()
1421 if (!dev_priv->psr.enabled) in intel_psr_work()
1424 if (READ_ONCE(dev_priv->psr.irq_aux_error)) in intel_psr_work()
1425 intel_psr_handle_irq(dev_priv); in intel_psr_work()
1433 if (!__psr_wait_for_idle_locked(dev_priv)) in intel_psr_work()
1441 if (dev_priv->psr.busy_frontbuffer_bits || dev_priv->psr.active) in intel_psr_work()
1444 intel_psr_activate(dev_priv->psr.dp); in intel_psr_work()
1446 mutex_unlock(&dev_priv->psr.lock); in intel_psr_work()
1462 void intel_psr_invalidate(struct drm_i915_private *dev_priv, in intel_psr_invalidate() argument
1465 if (!CAN_PSR(dev_priv)) in intel_psr_invalidate()
1471 mutex_lock(&dev_priv->psr.lock); in intel_psr_invalidate()
1472 if (!dev_priv->psr.enabled) { in intel_psr_invalidate()
1473 mutex_unlock(&dev_priv->psr.lock); in intel_psr_invalidate()
1477 frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(dev_priv->psr.pipe); in intel_psr_invalidate()
1478 dev_priv->psr.busy_frontbuffer_bits |= frontbuffer_bits; in intel_psr_invalidate()
1481 intel_psr_exit(dev_priv); in intel_psr_invalidate()
1483 mutex_unlock(&dev_priv->psr.lock); in intel_psr_invalidate()
1493 tgl_dc3co_flush(struct drm_i915_private *dev_priv, in tgl_dc3co_flush() argument
1496 mutex_lock(&dev_priv->psr.lock); in tgl_dc3co_flush()
1498 if (!dev_priv->psr.dc3co_enabled) in tgl_dc3co_flush()
1501 if (!dev_priv->psr.psr2_enabled || !dev_priv->psr.active) in tgl_dc3co_flush()
1509 INTEL_FRONTBUFFER_ALL_MASK(dev_priv->psr.pipe))) in tgl_dc3co_flush()
1512 tgl_psr2_enable_dc3co(dev_priv); in tgl_dc3co_flush()
1513 mod_delayed_work(system_wq, &dev_priv->psr.dc3co_work, in tgl_dc3co_flush()
1514 dev_priv->psr.dc3co_exit_delay); in tgl_dc3co_flush()
1517 mutex_unlock(&dev_priv->psr.lock); in tgl_dc3co_flush()
1533 void intel_psr_flush(struct drm_i915_private *dev_priv, in intel_psr_flush() argument
1536 if (!CAN_PSR(dev_priv)) in intel_psr_flush()
1540 tgl_dc3co_flush(dev_priv, frontbuffer_bits, origin); in intel_psr_flush()
1544 mutex_lock(&dev_priv->psr.lock); in intel_psr_flush()
1545 if (!dev_priv->psr.enabled) { in intel_psr_flush()
1546 mutex_unlock(&dev_priv->psr.lock); in intel_psr_flush()
1550 frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(dev_priv->psr.pipe); in intel_psr_flush()
1551 dev_priv->psr.busy_frontbuffer_bits &= ~frontbuffer_bits; in intel_psr_flush()
1555 psr_force_hw_tracking_exit(dev_priv); in intel_psr_flush()
1557 if (!dev_priv->psr.active && !dev_priv->psr.busy_frontbuffer_bits) in intel_psr_flush()
1558 schedule_work(&dev_priv->psr.work); in intel_psr_flush()
1559 mutex_unlock(&dev_priv->psr.lock); in intel_psr_flush()
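intel_psr_invalidate() and intel_psr_flush() above keep a busy_frontbuffer_bits mask restricted to the PSR pipe: invalidate ORs the bits in to keep PSR out while rendering is pending, flush clears them and schedules psr.work once nothing is active or busy. A standalone sketch of that bookkeeping; the 8-bits-per-pipe mask is an assumption standing in for INTEL_FRONTBUFFER_ALL_MASK():

#include <stdbool.h>

#define PIPE_FB_MASK(pipe)	(0xffu << ((pipe) * 8))	/* assumed width, for illustration */

struct fb_sketch {
	unsigned int busy_frontbuffer_bits;
};

static void fb_invalidate(struct fb_sketch *s, unsigned int bits, int pipe)
{
	bits &= PIPE_FB_MASK(pipe);		/* only the PSR pipe's bits matter */
	s->busy_frontbuffer_bits |= bits;	/* rendering started: keep PSR out */
}

static bool fb_flush(struct fb_sketch *s, unsigned int bits, int pipe)
{
	bits &= PIPE_FB_MASK(pipe);
	s->busy_frontbuffer_bits &= ~bits;	/* rendering finished on these buffers */
	/* the driver schedules psr.work when nothing is active or busy anymore */
	return s->busy_frontbuffer_bits == 0;
}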
1569 void intel_psr_init(struct drm_i915_private *dev_priv) in intel_psr_init() argument
1571 if (!HAS_PSR(dev_priv)) in intel_psr_init()
1574 if (!dev_priv->psr.sink_support) in intel_psr_init()
1577 if (IS_HASWELL(dev_priv)) in intel_psr_init()
1583 dev_priv->hsw_psr_mmio_adjust = _SRD_CTL_EDP - _HSW_EDP_PSR_BASE; in intel_psr_init()
1585 if (dev_priv->params.enable_psr == -1) in intel_psr_init()
1586 if (INTEL_GEN(dev_priv) < 9 || !dev_priv->vbt.psr.enable) in intel_psr_init()
1587 dev_priv->params.enable_psr = 0; in intel_psr_init()
1590 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) in intel_psr_init()
1592 dev_priv->psr.link_standby = false; in intel_psr_init()
1593 else if (INTEL_GEN(dev_priv) < 12) in intel_psr_init()
1595 dev_priv->psr.link_standby = dev_priv->vbt.psr.full_link; in intel_psr_init()
1597 INIT_WORK(&dev_priv->psr.work, intel_psr_work); in intel_psr_init()
1598 INIT_DELAYED_WORK(&dev_priv->psr.dc3co_work, tgl_dc3co_disable_work); in intel_psr_init()
1599 mutex_init(&dev_priv->psr.lock); in intel_psr_init()
1623 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in psr_alpm_check() local
1625 struct i915_psr *psr = &dev_priv->psr; in psr_alpm_check()
1634 drm_err(&dev_priv->drm, "Error reading ALPM status\n"); in psr_alpm_check()
1641 drm_dbg_kms(&dev_priv->drm, in psr_alpm_check()
1651 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in psr_capability_changed_check() local
1652 struct i915_psr *psr = &dev_priv->psr; in psr_capability_changed_check()
1658 drm_err(&dev_priv->drm, "Error reading DP_PSR_ESI\n"); in psr_capability_changed_check()
1665 drm_dbg_kms(&dev_priv->drm, in psr_capability_changed_check()
1675 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_short_pulse() local
1676 struct i915_psr *psr = &dev_priv->psr; in intel_psr_short_pulse()
1682 if (!CAN_PSR(dev_priv) || !intel_dp_is_edp(intel_dp)) in intel_psr_short_pulse()
1691 drm_err(&dev_priv->drm, in intel_psr_short_pulse()
1702 drm_dbg_kms(&dev_priv->drm, in intel_psr_short_pulse()
1705 drm_dbg_kms(&dev_priv->drm, in intel_psr_short_pulse()
1708 drm_dbg_kms(&dev_priv->drm, in intel_psr_short_pulse()
1711 drm_dbg_kms(&dev_priv->drm, in intel_psr_short_pulse()
1715 drm_err(&dev_priv->drm, in intel_psr_short_pulse()
1730 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); in intel_psr_enabled() local
1733 if (!CAN_PSR(dev_priv) || !intel_dp_is_edp(intel_dp)) in intel_psr_enabled()
1736 mutex_lock(&dev_priv->psr.lock); in intel_psr_enabled()
1737 ret = (dev_priv->psr.dp == intel_dp && dev_priv->psr.enabled); in intel_psr_enabled()
1738 mutex_unlock(&dev_priv->psr.lock); in intel_psr_enabled()
1747 struct drm_i915_private *dev_priv = to_i915(connector->dev); in intel_psr_atomic_check() local
1752 if (!CAN_PSR(dev_priv) || !new_state->crtc || in intel_psr_atomic_check()
1753 !dev_priv->psr.force_mode_changed) in intel_psr_atomic_check()
1758 if (dev_priv->psr.dp != &dig_port->dp) in intel_psr_atomic_check()
1768 struct drm_i915_private *dev_priv; in intel_psr_set_force_mode_changed() local
1773 dev_priv = dp_to_i915(intel_dp); in intel_psr_set_force_mode_changed()
1774 if (!CAN_PSR(dev_priv) || intel_dp != dev_priv->psr.dp) in intel_psr_set_force_mode_changed()
1777 dev_priv->psr.force_mode_changed = true; in intel_psr_set_force_mode_changed()