Lines Matching refs: dev_priv
86 enum hpd_pin intel_hpd_pin_default(struct drm_i915_private *dev_priv, in intel_hpd_pin_default() argument
139 static bool intel_hpd_irq_storm_detect(struct drm_i915_private *dev_priv, in intel_hpd_irq_storm_detect() argument
142 struct i915_hotplug *hpd = &dev_priv->hotplug; in intel_hpd_irq_storm_detect()
150 (!long_hpd && !dev_priv->hotplug.hpd_short_storm_enabled)) in intel_hpd_irq_storm_detect()
161 drm_dbg_kms(&dev_priv->drm, in intel_hpd_irq_storm_detect()
165 drm_dbg_kms(&dev_priv->drm, in intel_hpd_irq_storm_detect()
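
For context, a condensed sketch of the storm-detection logic these matches come from (reconstructed from the fragments above; the long/short weighting, the HPD_STORM_DETECT_PERIOD window and the exact message strings are assumptions, not verbatim source):

/*
 * Count HPD IRQs per pin inside a short detection window; a long pulse is
 * weighted more heavily than a short one. When the count exceeds the
 * configured threshold the pin is marked for disabling and the caller
 * switches the affected connectors over to polling.
 */
static bool intel_hpd_irq_storm_detect(struct drm_i915_private *dev_priv,
                                       enum hpd_pin pin, bool long_hpd)
{
        struct i915_hotplug *hpd = &dev_priv->hotplug;
        unsigned long start = hpd->stats[pin].last_jiffies;
        unsigned long end = start + msecs_to_jiffies(HPD_STORM_DETECT_PERIOD);
        const int increment = long_hpd ? 10 : 1;
        const int threshold = hpd->hpd_storm_threshold;
        bool storm = false;

        if (!threshold ||
            (!long_hpd && !dev_priv->hotplug.hpd_short_storm_enabled))
                return false;

        if (!time_in_range(jiffies, start, end)) {
                /* Window expired: restart counting from this IRQ. */
                hpd->stats[pin].last_jiffies = jiffies;
                hpd->stats[pin].count = 0;
        }

        hpd->stats[pin].count += increment;
        if (hpd->stats[pin].count > threshold) {
                hpd->stats[pin].state = HPD_MARK_DISABLED;
                drm_dbg_kms(&dev_priv->drm,
                            "HPD interrupt storm detected on PIN %d\n", pin);
                storm = true;
        } else {
                drm_dbg_kms(&dev_priv->drm,
                            "Received HPD interrupt on PIN %d - cnt: %d\n",
                            pin, hpd->stats[pin].count);
        }

        return storm;
}
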
175 intel_hpd_irq_storm_switch_to_polling(struct drm_i915_private *dev_priv) in intel_hpd_irq_storm_switch_to_polling() argument
177 struct drm_device *dev = &dev_priv->drm; in intel_hpd_irq_storm_switch_to_polling()
182 lockdep_assert_held(&dev_priv->irq_lock); in intel_hpd_irq_storm_switch_to_polling()
193 dev_priv->hotplug.stats[pin].state != HPD_MARK_DISABLED) in intel_hpd_irq_storm_switch_to_polling()
196 drm_info(&dev_priv->drm, in intel_hpd_irq_storm_switch_to_polling()
201 dev_priv->hotplug.stats[pin].state = HPD_DISABLED; in intel_hpd_irq_storm_switch_to_polling()
211 mod_delayed_work(system_wq, &dev_priv->hotplug.reenable_work, in intel_hpd_irq_storm_switch_to_polling()
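
A sketch of how the switch-over to polling around these matches is believed to work (connector iteration helpers such as intel_connector_hpd_pin() and the HPD_STORM_REENABLE_DELAY constant are assumptions):

static void
intel_hpd_irq_storm_switch_to_polling(struct drm_i915_private *dev_priv)
{
        struct drm_device *dev = &dev_priv->drm;
        struct drm_connector_list_iter conn_iter;
        struct intel_connector *connector;
        bool hpd_disabled = false;

        lockdep_assert_held(&dev_priv->irq_lock);

        drm_connector_list_iter_begin(dev, &conn_iter);
        for_each_intel_connector_iter(connector, &conn_iter) {
                enum hpd_pin pin = intel_connector_hpd_pin(connector);

                if (pin == HPD_NONE ||
                    dev_priv->hotplug.stats[pin].state != HPD_MARK_DISABLED)
                        continue;

                drm_info(&dev_priv->drm,
                         "HPD interrupt storm detected on connector %s: switching to polling\n",
                         connector->base.name);

                dev_priv->hotplug.stats[pin].state = HPD_DISABLED;
                connector->base.polled = DRM_CONNECTOR_POLL_CONNECT |
                        DRM_CONNECTOR_POLL_DISCONNECT;
                hpd_disabled = true;
        }
        drm_connector_list_iter_end(&conn_iter);

        /* Enable polling and queue the delayed work that re-enables HPD. */
        if (hpd_disabled) {
                drm_kms_helper_poll_enable(dev);
                mod_delayed_work(system_wq, &dev_priv->hotplug.reenable_work,
                                 msecs_to_jiffies(HPD_STORM_REENABLE_DELAY));
        }
}
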
218 struct drm_i915_private *dev_priv = in intel_hpd_irq_storm_reenable_work() local
219 container_of(work, typeof(*dev_priv), in intel_hpd_irq_storm_reenable_work()
221 struct drm_device *dev = &dev_priv->drm; in intel_hpd_irq_storm_reenable_work()
227 wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm); in intel_hpd_irq_storm_reenable_work()
229 spin_lock_irq(&dev_priv->irq_lock); in intel_hpd_irq_storm_reenable_work()
235 dev_priv->hotplug.stats[pin].state != HPD_DISABLED) in intel_hpd_irq_storm_reenable_work()
239 drm_dbg(&dev_priv->drm, in intel_hpd_irq_storm_reenable_work()
247 if (dev_priv->hotplug.stats[pin].state == HPD_DISABLED) in intel_hpd_irq_storm_reenable_work()
248 dev_priv->hotplug.stats[pin].state = HPD_ENABLED; in intel_hpd_irq_storm_reenable_work()
251 if (dev_priv->display_irqs_enabled && dev_priv->display.hpd_irq_setup) in intel_hpd_irq_storm_reenable_work()
252 dev_priv->display.hpd_irq_setup(dev_priv); in intel_hpd_irq_storm_reenable_work()
254 spin_unlock_irq(&dev_priv->irq_lock); in intel_hpd_irq_storm_reenable_work()
256 intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref); in intel_hpd_irq_storm_reenable_work()
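
A sketch of the re-enable work (the per-connector restore of connector->base.polled is elided here; the runtime-PM and locking pattern follows the fragments above):

static void intel_hpd_irq_storm_reenable_work(struct work_struct *work)
{
        struct drm_i915_private *dev_priv =
                container_of(work, typeof(*dev_priv),
                             hotplug.reenable_work.work);
        intel_wakeref_t wakeref;
        enum hpd_pin pin;

        /* HPD registers may be rewritten, so hold a runtime-PM wakeref. */
        wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);

        spin_lock_irq(&dev_priv->irq_lock);

        for_each_hpd_pin(pin) {
                /* (restoring connector->base.polled is elided in this sketch) */
                if (dev_priv->hotplug.stats[pin].state == HPD_DISABLED)
                        dev_priv->hotplug.stats[pin].state = HPD_ENABLED;
        }

        if (dev_priv->display_irqs_enabled && dev_priv->display.hpd_irq_setup)
                dev_priv->display.hpd_irq_setup(dev_priv);

        spin_unlock_irq(&dev_priv->irq_lock);

        intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
}
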
299 struct drm_i915_private *dev_priv = in i915_digport_work_func() local
305 spin_lock_irq(&dev_priv->irq_lock); in i915_digport_work_func()
306 long_port_mask = dev_priv->hotplug.long_port_mask; in i915_digport_work_func()
307 dev_priv->hotplug.long_port_mask = 0; in i915_digport_work_func()
308 short_port_mask = dev_priv->hotplug.short_port_mask; in i915_digport_work_func()
309 dev_priv->hotplug.short_port_mask = 0; in i915_digport_work_func()
310 spin_unlock_irq(&dev_priv->irq_lock); in i915_digport_work_func()
312 for_each_intel_encoder(&dev_priv->drm, encoder) { in i915_digport_work_func()
337 spin_lock_irq(&dev_priv->irq_lock); in i915_digport_work_func()
338 dev_priv->hotplug.event_bits |= old_bits; in i915_digport_work_func()
339 spin_unlock_irq(&dev_priv->irq_lock); in i915_digport_work_func()
340 queue_delayed_work(system_wq, &dev_priv->hotplug.hotplug_work, 0); in i915_digport_work_func()
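
A sketch of the dig-port bottom half built around these matches (the encoder filter via intel_encoder_is_dig_port()/enc_to_dig_port() is an assumption; the real code may use a dedicated has-hpd-pulse helper):

static void i915_digport_work_func(struct work_struct *work)
{
        struct drm_i915_private *dev_priv =
                container_of(work, struct drm_i915_private,
                             hotplug.dig_port_work);
        u32 long_port_mask, short_port_mask;
        struct intel_encoder *encoder;
        u32 old_bits = 0;

        /* Snapshot and clear the pending port masks under the IRQ lock. */
        spin_lock_irq(&dev_priv->irq_lock);
        long_port_mask = dev_priv->hotplug.long_port_mask;
        dev_priv->hotplug.long_port_mask = 0;
        short_port_mask = dev_priv->hotplug.short_port_mask;
        dev_priv->hotplug.short_port_mask = 0;
        spin_unlock_irq(&dev_priv->irq_lock);

        for_each_intel_encoder(&dev_priv->drm, encoder) {
                struct intel_digital_port *dig_port;
                enum port port = encoder->port;
                bool long_hpd, short_hpd;

                /* Only digital ports that registered a pulse handler. */
                if (!intel_encoder_is_dig_port(encoder) ||
                    !enc_to_dig_port(encoder)->hpd_pulse)
                        continue;

                long_hpd = long_port_mask & BIT(port);
                short_hpd = short_port_mask & BIT(port);
                if (!long_hpd && !short_hpd)
                        continue;

                dig_port = enc_to_dig_port(encoder);

                /* DP short-pulse/MST handling lives behind ->hpd_pulse();
                 * whatever it does not handle falls back to the generic path. */
                if (dig_port->hpd_pulse(dig_port, long_hpd) == IRQ_NONE)
                        old_bits |= BIT(encoder->hpd_pin);
        }

        if (old_bits) {
                spin_lock_irq(&dev_priv->irq_lock);
                dev_priv->hotplug.event_bits |= old_bits;
                spin_unlock_irq(&dev_priv->irq_lock);
                queue_delayed_work(system_wq, &dev_priv->hotplug.hotplug_work, 0);
        }
}
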
367 struct drm_i915_private *dev_priv = in i915_hotplug_work_func() local
370 struct drm_device *dev = &dev_priv->drm; in i915_hotplug_work_func()
378 drm_dbg_kms(&dev_priv->drm, "running encoder hotplug functions\n"); in i915_hotplug_work_func()
380 spin_lock_irq(&dev_priv->irq_lock); in i915_hotplug_work_func()
382 hpd_event_bits = dev_priv->hotplug.event_bits; in i915_hotplug_work_func()
383 dev_priv->hotplug.event_bits = 0; in i915_hotplug_work_func()
384 hpd_retry_bits = dev_priv->hotplug.retry_bits; in i915_hotplug_work_func()
385 dev_priv->hotplug.retry_bits = 0; in i915_hotplug_work_func()
388 intel_hpd_irq_storm_switch_to_polling(dev_priv); in i915_hotplug_work_func()
390 spin_unlock_irq(&dev_priv->irq_lock); in i915_hotplug_work_func()
411 drm_dbg_kms(&dev_priv->drm, in i915_hotplug_work_func()
437 spin_lock_irq(&dev_priv->irq_lock); in i915_hotplug_work_func()
438 dev_priv->hotplug.retry_bits |= retry; in i915_hotplug_work_func()
439 spin_unlock_irq(&dev_priv->irq_lock); in i915_hotplug_work_func()
441 mod_delayed_work(system_wq, &dev_priv->hotplug.hotplug_work, in i915_hotplug_work_func()
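
A condensed sketch of the hotplug work function these matches belong to (the encoder ->hotplug() dispatch, the INTEL_HOTPLUG_* return handling and the HPD_RETRY_DELAY constant are assumptions based on the i915 hotplug scheme):

static void i915_hotplug_work_func(struct work_struct *work)
{
        struct drm_i915_private *dev_priv =
                container_of(work, struct drm_i915_private,
                             hotplug.hotplug_work.work);
        struct drm_device *dev = &dev_priv->drm;
        struct drm_connector_list_iter conn_iter;
        struct intel_connector *connector;
        u32 changed = 0, retry = 0;
        u32 hpd_event_bits, hpd_retry_bits;

        mutex_lock(&dev->mode_config.mutex);
        drm_dbg_kms(&dev_priv->drm, "running encoder hotplug functions\n");

        /* Snapshot and clear the pending event/retry bits under the lock. */
        spin_lock_irq(&dev_priv->irq_lock);
        hpd_event_bits = dev_priv->hotplug.event_bits;
        dev_priv->hotplug.event_bits = 0;
        hpd_retry_bits = dev_priv->hotplug.retry_bits;
        dev_priv->hotplug.retry_bits = 0;

        /* Enable polling for connectors which had HPD IRQ storms. */
        intel_hpd_irq_storm_switch_to_polling(dev_priv);
        spin_unlock_irq(&dev_priv->irq_lock);

        drm_connector_list_iter_begin(dev, &conn_iter);
        for_each_intel_connector_iter(connector, &conn_iter) {
                enum hpd_pin pin = intel_connector_hpd_pin(connector);
                struct intel_encoder *encoder;
                u32 hpd_bit = BIT(pin);

                if (pin == HPD_NONE ||
                    !((hpd_event_bits | hpd_retry_bits) & hpd_bit))
                        continue;

                encoder = intel_attached_encoder(connector);

                switch (encoder->hotplug(encoder, connector,
                                         hpd_event_bits & hpd_bit)) {
                case INTEL_HOTPLUG_CHANGED:
                        changed |= hpd_bit;
                        break;
                case INTEL_HOTPLUG_RETRY:
                        retry |= hpd_bit;
                        break;
                default:
                        break;
                }
        }
        drm_connector_list_iter_end(&conn_iter);
        mutex_unlock(&dev->mode_config.mutex);

        if (changed)
                drm_kms_helper_hotplug_event(dev);

        /* Re-run later for pins that asked for a retry and did not change. */
        retry &= ~changed;
        if (retry) {
                spin_lock_irq(&dev_priv->irq_lock);
                dev_priv->hotplug.retry_bits |= retry;
                spin_unlock_irq(&dev_priv->irq_lock);
                mod_delayed_work(system_wq, &dev_priv->hotplug.hotplug_work,
                                 msecs_to_jiffies(HPD_RETRY_DELAY));
        }
}
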
463 void intel_hpd_irq_handler(struct drm_i915_private *dev_priv, in intel_hpd_irq_handler() argument
476 spin_lock(&dev_priv->irq_lock); in intel_hpd_irq_handler()
484 for_each_intel_encoder(&dev_priv->drm, encoder) { in intel_hpd_irq_handler()
497 drm_dbg(&dev_priv->drm, in intel_hpd_irq_handler()
505 dev_priv->hotplug.long_port_mask |= BIT(port); in intel_hpd_irq_handler()
508 dev_priv->hotplug.short_port_mask |= BIT(port); in intel_hpd_irq_handler()
519 if (dev_priv->hotplug.stats[pin].state == HPD_DISABLED) { in intel_hpd_irq_handler()
526 drm_WARN_ONCE(&dev_priv->drm, !HAS_GMCH(dev_priv), in intel_hpd_irq_handler()
532 if (dev_priv->hotplug.stats[pin].state != HPD_ENABLED) in intel_hpd_irq_handler()
543 dev_priv->hotplug.event_bits |= BIT(pin); in intel_hpd_irq_handler()
548 if (intel_hpd_irq_storm_detect(dev_priv, pin, long_hpd)) { in intel_hpd_irq_handler()
549 dev_priv->hotplug.event_bits &= ~BIT(pin); in intel_hpd_irq_handler()
559 if (storm_detected && dev_priv->display_irqs_enabled) in intel_hpd_irq_handler()
560 dev_priv->display.hpd_irq_setup(dev_priv); in intel_hpd_irq_handler()
561 spin_unlock(&dev_priv->irq_lock); in intel_hpd_irq_handler()
570 queue_work(dev_priv->hotplug.dp_wq, &dev_priv->hotplug.dig_port_work); in intel_hpd_irq_handler()
572 queue_delayed_work(system_wq, &dev_priv->hotplug.hotplug_work, 0); in intel_hpd_irq_handler()
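
A condensed sketch of the IRQ handler (the two-pass structure is inferred from the fragments above; the delegation bookkeeping via a local hpd_pulse_mask and the exact messages are assumptions):

void intel_hpd_irq_handler(struct drm_i915_private *dev_priv,
                           u32 pin_mask, u32 long_mask)
{
        struct intel_encoder *encoder;
        u32 hpd_pulse_mask = 0, long_pulse_mask = 0;
        bool storm_detected = false;
        bool queue_dig = false, queue_hp = false;
        enum hpd_pin pin;

        if (!pin_mask)
                return;

        spin_lock(&dev_priv->irq_lock);

        /* Pass 1: pins served by a dig port with a ->hpd_pulse() handler are
         * routed to i915_digport_work_func() via the long/short port masks. */
        for_each_intel_encoder(&dev_priv->drm, encoder) {
                bool long_hpd;

                pin = encoder->hpd_pin;
                if (!(BIT(pin) & pin_mask) ||
                    !intel_encoder_is_dig_port(encoder) ||
                    !enc_to_dig_port(encoder)->hpd_pulse)
                        continue;

                long_hpd = long_mask & BIT(pin);
                hpd_pulse_mask |= BIT(pin);
                if (long_hpd) {
                        long_pulse_mask |= BIT(pin);
                        dev_priv->hotplug.long_port_mask |= BIT(encoder->port);
                } else {
                        dev_priv->hotplug.short_port_mask |= BIT(encoder->port);
                }
                queue_dig = true;
        }

        /* Pass 2: per-pin bookkeeping plus storm detection. */
        for_each_hpd_pin(pin) {
                bool long_hpd;

                if (!(BIT(pin) & pin_mask))
                        continue;

                if (dev_priv->hotplug.stats[pin].state == HPD_DISABLED) {
                        /* GMCH latches the hotplug bits even when masked. */
                        drm_WARN_ONCE(&dev_priv->drm, !HAS_GMCH(dev_priv),
                                      "Received HPD interrupt on pin %d although disabled\n",
                                      pin);
                        continue;
                }

                if (dev_priv->hotplug.stats[pin].state != HPD_ENABLED)
                        continue;

                if (hpd_pulse_mask & BIT(pin)) {
                        /* Delegated to the dig-port work in pass 1. */
                        long_hpd = long_pulse_mask & BIT(pin);
                } else {
                        dev_priv->hotplug.event_bits |= BIT(pin);
                        long_hpd = true;
                        queue_hp = true;
                }

                if (intel_hpd_irq_storm_detect(dev_priv, pin, long_hpd)) {
                        dev_priv->hotplug.event_bits &= ~BIT(pin);
                        storm_detected = true;
                        queue_hp = true;
                }
        }

        if (storm_detected && dev_priv->display_irqs_enabled)
                dev_priv->display.hpd_irq_setup(dev_priv);
        spin_unlock(&dev_priv->irq_lock);

        if (queue_dig)
                queue_work(dev_priv->hotplug.dp_wq, &dev_priv->hotplug.dig_port_work);
        if (queue_hp)
                queue_delayed_work(system_wq, &dev_priv->hotplug.hotplug_work, 0);
}
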
589 void intel_hpd_init(struct drm_i915_private *dev_priv) in intel_hpd_init() argument
594 dev_priv->hotplug.stats[i].count = 0; in intel_hpd_init()
595 dev_priv->hotplug.stats[i].state = HPD_ENABLED; in intel_hpd_init()
598 WRITE_ONCE(dev_priv->hotplug.poll_enabled, false); in intel_hpd_init()
599 schedule_work(&dev_priv->hotplug.poll_init_work); in intel_hpd_init()
605 if (dev_priv->display_irqs_enabled && dev_priv->display.hpd_irq_setup) { in intel_hpd_init()
606 spin_lock_irq(&dev_priv->irq_lock); in intel_hpd_init()
607 if (dev_priv->display_irqs_enabled) in intel_hpd_init()
608 dev_priv->display.hpd_irq_setup(dev_priv); in intel_hpd_init()
609 spin_unlock_irq(&dev_priv->irq_lock); in intel_hpd_init()
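
The init path sketched from the fragments above (for_each_hpd_pin() and the double-checked display_irqs_enabled test are assumptions about the surrounding lines):

void intel_hpd_init(struct drm_i915_private *dev_priv)
{
        int i;

        /* Every pin starts out enabled with a zeroed storm counter. */
        for_each_hpd_pin(i) {
                dev_priv->hotplug.stats[i].count = 0;
                dev_priv->hotplug.stats[i].state = HPD_ENABLED;
        }

        WRITE_ONCE(dev_priv->hotplug.poll_enabled, false);
        schedule_work(&dev_priv->hotplug.poll_init_work);

        /* Re-check under the lock to keep the locking asserts happy. */
        if (dev_priv->display_irqs_enabled && dev_priv->display.hpd_irq_setup) {
                spin_lock_irq(&dev_priv->irq_lock);
                if (dev_priv->display_irqs_enabled)
                        dev_priv->display.hpd_irq_setup(dev_priv);
                spin_unlock_irq(&dev_priv->irq_lock);
        }
}
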
615 struct drm_i915_private *dev_priv = in i915_hpd_poll_init_work() local
618 struct drm_device *dev = &dev_priv->drm; in i915_hpd_poll_init_work()
625 enabled = READ_ONCE(dev_priv->hotplug.poll_enabled); in i915_hpd_poll_init_work()
672 void intel_hpd_poll_init(struct drm_i915_private *dev_priv) in intel_hpd_poll_init() argument
674 WRITE_ONCE(dev_priv->hotplug.poll_enabled, true); in intel_hpd_poll_init()
682 schedule_work(&dev_priv->hotplug.poll_init_work); in intel_hpd_poll_init()
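
A sketch of the poll-init work that intel_hpd_init()/intel_hpd_poll_init() schedule (the connector->base.polled handling and the final drm_helper_hpd_irq_event() catch-up call are assumptions):

static void i915_hpd_poll_init_work(struct work_struct *work)
{
        struct drm_i915_private *dev_priv =
                container_of(work, struct drm_i915_private,
                             hotplug.poll_init_work);
        struct drm_device *dev = &dev_priv->drm;
        struct drm_connector_list_iter conn_iter;
        struct intel_connector *connector;
        bool enabled;

        mutex_lock(&dev->mode_config.mutex);

        enabled = READ_ONCE(dev_priv->hotplug.poll_enabled);

        drm_connector_list_iter_begin(dev, &conn_iter);
        for_each_intel_connector_iter(connector, &conn_iter) {
                enum hpd_pin pin = intel_connector_hpd_pin(connector);

                if (pin == HPD_NONE)
                        continue;

                /* Restore the default, then force polling if requested. */
                connector->base.polled = connector->polled;

                if (enabled && connector->base.polled == DRM_CONNECTOR_POLL_HPD)
                        connector->base.polled = DRM_CONNECTOR_POLL_CONNECT |
                                DRM_CONNECTOR_POLL_DISCONNECT;
        }
        drm_connector_list_iter_end(&conn_iter);

        if (enabled)
                drm_kms_helper_poll_enable(dev);

        mutex_unlock(&dev->mode_config.mutex);

        /* Catch any hotplugs that happened while polling was being disabled. */
        if (!enabled)
                drm_helper_hpd_irq_event(dev);
}
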
685 void intel_hpd_init_work(struct drm_i915_private *dev_priv) in intel_hpd_init_work() argument
687 INIT_DELAYED_WORK(&dev_priv->hotplug.hotplug_work, in intel_hpd_init_work()
689 INIT_WORK(&dev_priv->hotplug.dig_port_work, i915_digport_work_func); in intel_hpd_init_work()
690 INIT_WORK(&dev_priv->hotplug.poll_init_work, i915_hpd_poll_init_work); in intel_hpd_init_work()
691 INIT_DELAYED_WORK(&dev_priv->hotplug.reenable_work, in intel_hpd_init_work()
695 void intel_hpd_cancel_work(struct drm_i915_private *dev_priv) in intel_hpd_cancel_work() argument
697 spin_lock_irq(&dev_priv->irq_lock); in intel_hpd_cancel_work()
699 dev_priv->hotplug.long_port_mask = 0; in intel_hpd_cancel_work()
700 dev_priv->hotplug.short_port_mask = 0; in intel_hpd_cancel_work()
701 dev_priv->hotplug.event_bits = 0; in intel_hpd_cancel_work()
702 dev_priv->hotplug.retry_bits = 0; in intel_hpd_cancel_work()
704 spin_unlock_irq(&dev_priv->irq_lock); in intel_hpd_cancel_work()
706 cancel_work_sync(&dev_priv->hotplug.dig_port_work); in intel_hpd_cancel_work()
707 cancel_delayed_work_sync(&dev_priv->hotplug.hotplug_work); in intel_hpd_cancel_work()
708 cancel_work_sync(&dev_priv->hotplug.poll_init_work); in intel_hpd_cancel_work()
709 cancel_delayed_work_sync(&dev_priv->hotplug.reenable_work); in intel_hpd_cancel_work()
712 bool intel_hpd_disable(struct drm_i915_private *dev_priv, enum hpd_pin pin) in intel_hpd_disable() argument
719 spin_lock_irq(&dev_priv->irq_lock); in intel_hpd_disable()
720 if (dev_priv->hotplug.stats[pin].state == HPD_ENABLED) { in intel_hpd_disable()
721 dev_priv->hotplug.stats[pin].state = HPD_DISABLED; in intel_hpd_disable()
724 spin_unlock_irq(&dev_priv->irq_lock); in intel_hpd_disable()
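
The disable helper sketched in full, since the listing omits its return value (the ret bookkeeping is an assumption):

/* Returns true only if the pin actually transitioned from enabled to
 * disabled, so the caller knows whether a matching intel_hpd_enable()
 * is owed later. */
bool intel_hpd_disable(struct drm_i915_private *dev_priv, enum hpd_pin pin)
{
        bool ret = false;

        if (pin == HPD_NONE)
                return false;

        spin_lock_irq(&dev_priv->irq_lock);
        if (dev_priv->hotplug.stats[pin].state == HPD_ENABLED) {
                dev_priv->hotplug.stats[pin].state = HPD_DISABLED;
                ret = true;
        }
        spin_unlock_irq(&dev_priv->irq_lock);

        return ret;
}
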
729 void intel_hpd_enable(struct drm_i915_private *dev_priv, enum hpd_pin pin) in intel_hpd_enable() argument
734 spin_lock_irq(&dev_priv->irq_lock); in intel_hpd_enable()
735 dev_priv->hotplug.stats[pin].state = HPD_ENABLED; in intel_hpd_enable()
736 spin_unlock_irq(&dev_priv->irq_lock); in intel_hpd_enable()