Lines Matching +full:sub +full:- +full:sampled
1 /* i915_irq.c -- IRQ support for the I915 -*- linux-c -*-
11 * distribute, sub license, and/or sell copies of the Software, and to
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
157 struct i915_hotplug *hpd = &dev_priv->hotplug; in intel_hpd_init_pins()
162 hpd->hpd = hpd_status_g4x; in intel_hpd_init_pins()
164 hpd->hpd = hpd_status_i915; in intel_hpd_init_pins()
169 hpd->hpd = hpd_gen11; in intel_hpd_init_pins()
171 hpd->hpd = hpd_bxt; in intel_hpd_init_pins()
173 hpd->hpd = hpd_bdw; in intel_hpd_init_pins()
175 hpd->hpd = hpd_ivb; in intel_hpd_init_pins()
177 hpd->hpd = hpd_ilk; in intel_hpd_init_pins()
184 hpd->pch_hpd = hpd_icp; in intel_hpd_init_pins()
186 hpd->pch_hpd = hpd_spt; in intel_hpd_init_pins()
188 hpd->pch_hpd = hpd_cpt; in intel_hpd_init_pins()
190 hpd->pch_hpd = hpd_ibx; in intel_hpd_init_pins()
200 drm_crtc_handle_vblank(&crtc->base); in intel_handle_vblank()
242 drm_WARN(&uncore->i915->drm, 1, in gen3_assert_iir_is_zero()
258 drm_WARN(&uncore->i915->drm, 1, in gen2_assert_iir_is_zero()
297 lockdep_assert_held(&dev_priv->irq_lock); in i915_hotplug_interrupt_update_locked()
298 drm_WARN_ON(&dev_priv->drm, bits & ~mask); in i915_hotplug_interrupt_update_locked()
307 * i915_hotplug_interrupt_update - update hotplug interrupt enable
312 * of an interrupt context. To avoid that read-modify-write cycles
315 * held already, this function acquires the lock itself. A non-locking
322 spin_lock_irq(&dev_priv->irq_lock); in i915_hotplug_interrupt_update()
324 spin_unlock_irq(&dev_priv->irq_lock); in i915_hotplug_interrupt_update()
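The comment above explains why the update is split into a locked helper and a locking wrapper: the read-modify-write of the hotplug enable register must be serialized against the interrupt handler. A minimal sketch of that pairing, assuming the PORT_HOTPLUG_EN read-modify-write is what the locked variant protects (illustrative, not the full driver code):

	static void i915_hotplug_interrupt_update_locked(struct drm_i915_private *dev_priv,
							 u32 mask, u32 bits)
	{
		u32 val;

		lockdep_assert_held(&dev_priv->irq_lock);	/* caller already owns irq_lock */
		drm_WARN_ON(&dev_priv->drm, bits & ~mask);	/* enabled bits must lie within mask */

		val = I915_READ(PORT_HOTPLUG_EN);		/* read-modify-write is safe under the lock */
		val &= ~mask;
		val |= bits;
		I915_WRITE(PORT_HOTPLUG_EN, val);
	}

	void i915_hotplug_interrupt_update(struct drm_i915_private *dev_priv,
					   u32 mask, u32 bits)
	{
		spin_lock_irq(&dev_priv->irq_lock);		/* serialize against the interrupt handler */
		i915_hotplug_interrupt_update_locked(dev_priv, mask, bits);
		spin_unlock_irq(&dev_priv->irq_lock);
	}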
328 * ilk_update_display_irq - update DEIMR
339 lockdep_assert_held(&dev_priv->irq_lock); in ilk_update_display_irq()
341 drm_WARN_ON(&dev_priv->drm, enabled_irq_mask & ~interrupt_mask); in ilk_update_display_irq()
343 if (drm_WARN_ON(&dev_priv->drm, !intel_irqs_enabled(dev_priv))) in ilk_update_display_irq()
346 new_val = dev_priv->irq_mask; in ilk_update_display_irq()
350 if (new_val != dev_priv->irq_mask) { in ilk_update_display_irq()
351 dev_priv->irq_mask = new_val; in ilk_update_display_irq()
352 I915_WRITE(DEIMR, dev_priv->irq_mask); in ilk_update_display_irq()
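Putting the fragments above together, the DEIMR update follows a cache-then-compare pattern: recompute the cached mask and touch the hardware only when it actually changes. A hedged reconstruction (the trailing POSTING_READ is an assumption about how the write gets flushed):

	void ilk_update_display_irq(struct drm_i915_private *dev_priv,
				    u32 interrupt_mask, u32 enabled_irq_mask)
	{
		u32 new_val;

		lockdep_assert_held(&dev_priv->irq_lock);
		drm_WARN_ON(&dev_priv->drm, enabled_irq_mask & ~interrupt_mask);

		if (drm_WARN_ON(&dev_priv->drm, !intel_irqs_enabled(dev_priv)))
			return;

		new_val = dev_priv->irq_mask;
		new_val &= ~interrupt_mask;			 /* clear every bit being updated... */
		new_val |= (~enabled_irq_mask & interrupt_mask); /* ...then re-mask the ones not enabled */

		if (new_val != dev_priv->irq_mask) {		 /* avoid redundant mmio writes */
			dev_priv->irq_mask = new_val;
			I915_WRITE(DEIMR, dev_priv->irq_mask);
			POSTING_READ(DEIMR);			 /* flush before dropping the lock */
		}
	}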
358 * bdw_update_port_irq - update DE port interrupt
370 lockdep_assert_held(&dev_priv->irq_lock); in bdw_update_port_irq()
372 drm_WARN_ON(&dev_priv->drm, enabled_irq_mask & ~interrupt_mask); in bdw_update_port_irq()
374 if (drm_WARN_ON(&dev_priv->drm, !intel_irqs_enabled(dev_priv))) in bdw_update_port_irq()
390 * bdw_update_pipe_irq - update DE pipe interrupt
403 lockdep_assert_held(&dev_priv->irq_lock); in bdw_update_pipe_irq()
405 drm_WARN_ON(&dev_priv->drm, enabled_irq_mask & ~interrupt_mask); in bdw_update_pipe_irq()
407 if (drm_WARN_ON(&dev_priv->drm, !intel_irqs_enabled(dev_priv))) in bdw_update_pipe_irq()
410 new_val = dev_priv->de_irq_mask[pipe]; in bdw_update_pipe_irq()
414 if (new_val != dev_priv->de_irq_mask[pipe]) { in bdw_update_pipe_irq()
415 dev_priv->de_irq_mask[pipe] = new_val; in bdw_update_pipe_irq()
416 I915_WRITE(GEN8_DE_PIPE_IMR(pipe), dev_priv->de_irq_mask[pipe]); in bdw_update_pipe_irq()
422 * ibx_display_interrupt_update - update SDEIMR
435 drm_WARN_ON(&dev_priv->drm, enabled_irq_mask & ~interrupt_mask); in ibx_display_interrupt_update()
437 lockdep_assert_held(&dev_priv->irq_lock); in ibx_display_interrupt_update()
439 if (drm_WARN_ON(&dev_priv->drm, !intel_irqs_enabled(dev_priv))) in ibx_display_interrupt_update()
449 u32 status_mask = dev_priv->pipestat_irq_mask[pipe]; in i915_pipestat_enable_mask()
452 lockdep_assert_held(&dev_priv->irq_lock); in i915_pipestat_enable_mask()
461 if (drm_WARN_ON_ONCE(&dev_priv->drm, in i915_pipestat_enable_mask()
468 if (drm_WARN_ON_ONCE(&dev_priv->drm, in i915_pipestat_enable_mask()
481 drm_WARN_ONCE(&dev_priv->drm, in i915_pipestat_enable_mask()
496 drm_WARN_ONCE(&dev_priv->drm, status_mask & ~PIPESTAT_INT_STATUS_MASK, in i915_enable_pipestat()
500 lockdep_assert_held(&dev_priv->irq_lock); in i915_enable_pipestat()
501 drm_WARN_ON(&dev_priv->drm, !intel_irqs_enabled(dev_priv)); in i915_enable_pipestat()
503 if ((dev_priv->pipestat_irq_mask[pipe] & status_mask) == status_mask) in i915_enable_pipestat()
506 dev_priv->pipestat_irq_mask[pipe] |= status_mask; in i915_enable_pipestat()
519 drm_WARN_ONCE(&dev_priv->drm, status_mask & ~PIPESTAT_INT_STATUS_MASK, in i915_disable_pipestat()
523 lockdep_assert_held(&dev_priv->irq_lock); in i915_disable_pipestat()
524 drm_WARN_ON(&dev_priv->drm, !intel_irqs_enabled(dev_priv)); in i915_disable_pipestat()
526 if ((dev_priv->pipestat_irq_mask[pipe] & status_mask) == 0) in i915_disable_pipestat()
529 dev_priv->pipestat_irq_mask[pipe] &= ~status_mask; in i915_disable_pipestat()
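The pipestat helpers keep a software copy of each pipe's enabled status bits in pipestat_irq_mask[]. A sketch of the enable path, assuming the usual PIPESTAT(pipe) layout with enable bits in the high half and sticky status bits in the low half:

	void i915_enable_pipestat(struct drm_i915_private *dev_priv,
				  enum pipe pipe, u32 status_mask)
	{
		i915_reg_t reg = PIPESTAT(pipe);
		u32 enable_mask;

		lockdep_assert_held(&dev_priv->irq_lock);

		if ((dev_priv->pipestat_irq_mask[pipe] & status_mask) == status_mask)
			return;					/* already enabled, nothing to do */

		dev_priv->pipestat_irq_mask[pipe] |= status_mask;
		enable_mask = i915_pipestat_enable_mask(dev_priv, pipe);

		/* writing the status bits back also acks any stale events */
		I915_WRITE(reg, enable_mask | status_mask);
		POSTING_READ(reg);
	}

The disable path mirrors this: clear the bits from pipestat_irq_mask[pipe], recompute the enable mask, and write the register again.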
538 if (!dev_priv->opregion.asle) in i915_has_asle()
545 * i915_enable_asle_pipestat - enable ASLE pipestat for OpRegion
553 spin_lock_irq(&dev_priv->irq_lock); in i915_enable_asle_pipestat()
560 spin_unlock_irq(&dev_priv->irq_lock); in i915_enable_asle_pipestat()
580 * | may be shifted forward 1-3 extra lines via PIPECONF
587 * ----va---> <-----------------vb--------------------> <--------va-------------
588 * | | <----vs-----> |
589 …* -vbs-----> <---vbs+1---> <---vbs+2---> <-----0-----> <-----1-----> <-----2--- (scanline counter …
590 …* -vbs-2---> <---vbs-1---> <---vbs-----> <---vbs+1---> <---vbs+2---> <-----0--- (scanline counter …
591 …* -vbs-2---> <---vbs-2---> <---vbs-1---> <---vbs-----> <---vbs+1---> <---vbs+2- (scanline counter …
606 * - most events happen at the start of horizontal sync
607 * - frame start happens at the start of horizontal blank, 1-4 lines
609 * - gen3/4 pixel and frame counter are synchronized with the start
618 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in i915_get_vblank_counter()
619 struct drm_vblank_crtc *vblank = &dev_priv->drm.vblank[drm_crtc_index(crtc)]; in i915_get_vblank_counter()
620 const struct drm_display_mode *mode = &vblank->hwmode; in i915_get_vblank_counter()
621 enum pipe pipe = to_intel_crtc(crtc)->pipe; in i915_get_vblank_counter()
633 * does not like us returning non-zero frame counter values in i915_get_vblank_counter()
635 * counter. Thus we must stop non-zero values leaking out. in i915_get_vblank_counter()
637 if (!vblank->max_vblank_count) in i915_get_vblank_counter()
640 htotal = mode->crtc_htotal; in i915_get_vblank_counter()
641 hsync_start = mode->crtc_hsync_start; in i915_get_vblank_counter()
642 vbl_start = mode->crtc_vblank_start; in i915_get_vblank_counter()
643 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in i915_get_vblank_counter()
650 vbl_start -= htotal - hsync_start; in i915_get_vblank_counter()
655 spin_lock_irqsave(&dev_priv->uncore.lock, irqflags); in i915_get_vblank_counter()
668 spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags); in i915_get_vblank_counter()
684 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in g4x_get_vblank_counter()
685 enum pipe pipe = to_intel_crtc(crtc)->pipe; in g4x_get_vblank_counter()
700 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); in __intel_get_crtc_scanline_from_timestamp()
702 &crtc->base.dev->vblank[drm_crtc_index(&crtc->base)]; in __intel_get_crtc_scanline_from_timestamp()
703 const struct drm_display_mode *mode = &vblank->hwmode; in __intel_get_crtc_scanline_from_timestamp()
704 u32 vblank_start = mode->crtc_vblank_start; in __intel_get_crtc_scanline_from_timestamp()
705 u32 vtotal = mode->crtc_vtotal; in __intel_get_crtc_scanline_from_timestamp()
706 u32 htotal = mode->crtc_htotal; in __intel_get_crtc_scanline_from_timestamp()
707 u32 clock = mode->crtc_clock; in __intel_get_crtc_scanline_from_timestamp()
720 * is sampled at every start of vertical blank. in __intel_get_crtc_scanline_from_timestamp()
723 PIPE_FRMTMSTMP(crtc->pipe)); in __intel_get_crtc_scanline_from_timestamp()
732 PIPE_FRMTMSTMP(crtc->pipe)); in __intel_get_crtc_scanline_from_timestamp()
735 scanline = div_u64(mul_u32_u32(scan_curr_time - scan_prev_time, in __intel_get_crtc_scanline_from_timestamp()
737 scanline = min(scanline, vtotal - 1); in __intel_get_crtc_scanline_from_timestamp()
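The division above converts the time elapsed since the last frame timestamp into a scanline number. Assuming the timestamp counter ticks in microseconds and crtc_clock is in kHz (the usual DRM convention), a quick sanity check with illustrative 1080p-like timings:

	/* htotal = 2200 pixels, clock = 148500 kHz -> one line lasts
	 * 1000 * 2200 / 148500 ~= 14.8 us.  With a delta of 7408 us since
	 * the last start of vblank:
	 *
	 *   scanline = 7408 * 148500 / (1000 * 2200) = 500
	 *
	 * which is then clamped to vtotal - 1 in case the delta overshoots
	 * the frame. */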
749 struct drm_device *dev = crtc->base.dev; in __intel_get_crtc_scanline()
753 enum pipe pipe = crtc->pipe; in __intel_get_crtc_scanline()
756 if (!crtc->active) in __intel_get_crtc_scanline()
757 return -1; in __intel_get_crtc_scanline()
759 vblank = &crtc->base.dev->vblank[drm_crtc_index(&crtc->base)]; in __intel_get_crtc_scanline()
760 mode = &vblank->hwmode; in __intel_get_crtc_scanline()
762 if (crtc->mode_flags & I915_MODE_FLAG_GET_SCANLINE_FROM_TIMESTAMP) in __intel_get_crtc_scanline()
765 vtotal = mode->crtc_vtotal; in __intel_get_crtc_scanline()
766 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in __intel_get_crtc_scanline()
803 return (position + crtc->scanline_offset) % vtotal; in __intel_get_crtc_scanline()
812 struct drm_device *dev = _crtc->dev; in i915_get_crtc_scanoutpos()
815 enum pipe pipe = crtc->pipe; in i915_get_crtc_scanoutpos()
821 crtc->mode_flags & I915_MODE_FLAG_USE_SCANLINE_COUNTER; in i915_get_crtc_scanoutpos()
823 if (drm_WARN_ON(&dev_priv->drm, !mode->crtc_clock)) { in i915_get_crtc_scanoutpos()
824 drm_dbg(&dev_priv->drm, in i915_get_crtc_scanoutpos()
830 htotal = mode->crtc_htotal; in i915_get_crtc_scanoutpos()
831 hsync_start = mode->crtc_hsync_start; in i915_get_crtc_scanoutpos()
832 vtotal = mode->crtc_vtotal; in i915_get_crtc_scanoutpos()
833 vbl_start = mode->crtc_vblank_start; in i915_get_crtc_scanoutpos()
834 vbl_end = mode->crtc_vblank_end; in i915_get_crtc_scanoutpos()
836 if (mode->flags & DRM_MODE_FLAG_INTERLACE) { in i915_get_crtc_scanoutpos()
847 spin_lock_irqsave(&dev_priv->uncore.lock, irqflags); in i915_get_crtc_scanoutpos()
882 position = vtotal - 1; in i915_get_crtc_scanoutpos()
891 * always add htotal-hsync_start to the current pixel position. in i915_get_crtc_scanoutpos()
893 position = (position + htotal - hsync_start) % vtotal; in i915_get_crtc_scanoutpos()
902 spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags); in i915_get_crtc_scanoutpos()
911 position -= vbl_end; in i915_get_crtc_scanoutpos()
913 position += vtotal - vbl_end; in i915_get_crtc_scanoutpos()
920 *hpos = position - (*vpos * htotal); in i915_get_crtc_scanoutpos()
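After the rebasing against vbl_end above, the pixel-count position is split into whole lines and the remainder within the line. The *vpos assignment itself is elided from this listing, so here is a hedged recap of the pair:

	*vpos = position / htotal;		/* whole scanlines since the end of vblank */
	*hpos = position - (*vpos * htotal);	/* leftover pixels within that scanline */

For example, with htotal = 2200 a rebased position of 220300 pixels gives *vpos = 100 and *hpos = 300.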
936 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); in intel_get_crtc_scanline()
940 spin_lock_irqsave(&dev_priv->uncore.lock, irqflags); in intel_get_crtc_scanline()
942 spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags); in intel_get_crtc_scanline()
948 * ivb_parity_work - Workqueue called when a parity error interrupt
960 struct intel_gt *gt = &dev_priv->gt; in ivb_parity_work()
970 mutex_lock(&dev_priv->drm.struct_mutex); in ivb_parity_work()
973 if (drm_WARN_ON(&dev_priv->drm, !dev_priv->l3_parity.which_slice)) in ivb_parity_work()
980 while ((slice = ffs(dev_priv->l3_parity.which_slice)) != 0) { in ivb_parity_work()
983 slice--; in ivb_parity_work()
984 if (drm_WARN_ON_ONCE(&dev_priv->drm, in ivb_parity_work()
988 dev_priv->l3_parity.which_slice &= ~(1<<slice); in ivb_parity_work()
1007 kobject_uevent_env(&dev_priv->drm.primary->kdev->kobj, in ivb_parity_work()
1010 DRM_DEBUG("Parity error: Slice = %d, Row = %d, Bank = %d, Sub bank = %d.\n", in ivb_parity_work()
1022 drm_WARN_ON(&dev_priv->drm, dev_priv->l3_parity.which_slice); in ivb_parity_work()
1023 spin_lock_irq(&gt->irq_lock); in ivb_parity_work()
1025 spin_unlock_irq(&gt->irq_lock); in ivb_parity_work()

1027 mutex_unlock(&dev_priv->drm.struct_mutex); in ivb_parity_work()
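The uevent above is how userspace learns which L3 slice/row/bank needs remapping. A sketch of how the payload is presumably assembled, following the usual KEY=value uevent convention (the I915_L3_PARITY_UEVENT macro and the exact key names are assumptions of this sketch):

	char *parity_event[6];

	parity_event[0] = I915_L3_PARITY_UEVENT "=1";
	parity_event[1] = kasprintf(GFP_KERNEL, "ROW=%d", row);
	parity_event[2] = kasprintf(GFP_KERNEL, "BANK=%d", bank);
	parity_event[3] = kasprintf(GFP_KERNEL, "SUBBANK=%d", subbank);
	parity_event[4] = kasprintf(GFP_KERNEL, "SLICE=%d", slice);
	parity_event[5] = NULL;

	kobject_uevent_env(&dev_priv->drm.primary->kdev->kobj,
			   KOBJ_CHANGE, parity_event);

	/* the kasprintf'd strings must be freed once the uevent is sent */
	kfree(parity_event[4]);
	kfree(parity_event[3]);
	kfree(parity_event[2]);
	kfree(parity_event[1]);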
1189 drm_dbg(&dev_priv->drm, in intel_get_hpd_pins()
1197 wake_up_all(&dev_priv->gmbus_wait_queue); in gmbus_irq_handler()
1202 wake_up_all(&dev_priv->gmbus_wait_queue); in dp_aux_irq_handler()
1213 struct intel_pipe_crc *pipe_crc = &crtc->pipe_crc; in display_pipe_crc_irq_handler()
1218 spin_lock(&pipe_crc->lock); in display_pipe_crc_irq_handler()
1227 if (pipe_crc->skipped <= 0 || in display_pipe_crc_irq_handler()
1228 (INTEL_GEN(dev_priv) >= 8 && pipe_crc->skipped == 1)) { in display_pipe_crc_irq_handler()
1229 pipe_crc->skipped++; in display_pipe_crc_irq_handler()
1230 spin_unlock(&pipe_crc->lock); in display_pipe_crc_irq_handler()
1233 spin_unlock(&pipe_crc->lock); in display_pipe_crc_irq_handler()
1235 drm_crtc_add_crc_entry(&crtc->base, true, in display_pipe_crc_irq_handler()
1236 drm_crtc_accurate_vblank_count(&crtc->base), in display_pipe_crc_irq_handler()
1299 dev_priv->pipestat_irq_mask[pipe] = 0; in i9xx_pipestat_irq_reset()
1308 spin_lock(&dev_priv->irq_lock); in i9xx_pipestat_irq_ack()
1310 if (!dev_priv->display_irqs_enabled) { in i9xx_pipestat_irq_ack()
1311 spin_unlock(&dev_priv->irq_lock); in i9xx_pipestat_irq_ack()
1343 status_mask |= dev_priv->pipestat_irq_mask[pipe]; in i9xx_pipestat_irq_ack()
1366 spin_unlock(&dev_priv->irq_lock); in i9xx_pipestat_irq_ack()
1488 drm_WARN_ONCE(&dev_priv->drm, 1, in i9xx_hpd_irq_ack()
1510 dev_priv->hotplug.hpd, in i9xx_hpd_irq_handler()
1531 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in valleyview_irq_handler()
1555 * A CPU interrupt will only be raised when 'x' has a 0->1 edge. in valleyview_irq_handler()
1592 gen6_gt_irq_handler(&dev_priv->gt, gt_iir); in valleyview_irq_handler()
1594 gen6_rps_irq_handler(&dev_priv->gt.rps, pm_iir); in valleyview_irq_handler()
1602 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in valleyview_irq_handler()
1616 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in cherryview_irq_handler()
1639 * A CPU interrupt will only be raised when 'x' has a 0->1 edge. in cherryview_irq_handler()
1649 gen8_gt_irq_handler(&dev_priv->gt, master_ctl); in cherryview_irq_handler()
1679 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in cherryview_irq_handler()
1710 dev_priv->hotplug.pch_hpd, in ibx_hpd_irq_handler()
1726 drm_dbg(&dev_priv->drm, "PCH audio power change on port %d\n", in ibx_irq_handler()
1737 drm_dbg(&dev_priv->drm, "PCH HDCP audio interrupt\n"); in ibx_irq_handler()
1740 drm_dbg(&dev_priv->drm, "PCH transcoder audio interrupt\n"); in ibx_irq_handler()
1743 drm_err(&dev_priv->drm, "PCH poison interrupt\n"); in ibx_irq_handler()
1747 drm_dbg(&dev_priv->drm, " pipe %c FDI IIR: 0x%08x\n", in ibx_irq_handler()
1753 drm_dbg(&dev_priv->drm, "PCH transcoder CRC done interrupt\n"); in ibx_irq_handler()
1756 drm_dbg(&dev_priv->drm, in ibx_irq_handler()
1772 drm_err(&dev_priv->drm, "Poison interrupt\n"); in ivb_err_int_handler()
1795 drm_err(&dev_priv->drm, "PCH poison interrupt\n"); in cpt_serr_int_handler()
1814 drm_dbg(&dev_priv->drm, "PCH audio power change on port %c\n", in cpt_irq_handler()
1825 drm_dbg(&dev_priv->drm, "Audio CP request interrupt\n"); in cpt_irq_handler()
1828 drm_dbg(&dev_priv->drm, "Audio CP change interrupt\n"); in cpt_irq_handler()
1832 drm_dbg(&dev_priv->drm, " pipe %c FDI IIR: 0x%08x\n", in cpt_irq_handler()
1856 drm_WARN(&dev_priv->drm, !HAS_PCH_ICP(dev_priv), in icp_irq_handler()
1872 dev_priv->hotplug.pch_hpd, in icp_irq_handler()
1884 dev_priv->hotplug.pch_hpd, in icp_irq_handler()
1910 dev_priv->hotplug.pch_hpd, in spt_irq_handler()
1922 dev_priv->hotplug.pch_hpd, in spt_irq_handler()
1943 dev_priv->hotplug.hpd, in ilk_hpd_irq_handler()
1965 drm_err(&dev_priv->drm, "Poison interrupt\n"); in ilk_display_irq_handler()
1992 gen5_rps_irq_handler(&dev_priv->gt.rps); in ilk_display_irq_handler()
2038 * 1 - Disable Master Interrupt Control.
2039 * 2 - Find the source(s) of the interrupt.
2040 * 3 - Clear the Interrupt Identity bits (IIR).
2041 * 4 - Process the interrupt(s) that had bits set in the IIRs.
2042 * 5 - Re-enable Master Interrupt Control.
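A minimal skeleton of the five steps listed above, reduced to the display path only; the real ilk_irq_handler that follows also walks the GT, PM and PCH sources, and the raw_reg_* helpers and DE_MASTER_IRQ_CONTROL bit are assumptions about the register access in use:

	static irqreturn_t ilk_irq_handler_sketch(int irq, void *arg)
	{
		struct drm_i915_private *i915 = arg;
		void __iomem * const regs = i915->uncore.regs;
		u32 de_ier, de_iir;
		irqreturn_t ret = IRQ_NONE;

		/* 1: disable master interrupt control */
		de_ier = raw_reg_read(regs, DEIER);
		raw_reg_write(regs, DEIER, de_ier & ~DE_MASTER_IRQ_CONTROL);

		/* 2: find the source(s) */
		de_iir = raw_reg_read(regs, DEIIR);
		if (de_iir) {
			/* 3: clear the identity bits so a new event re-asserts the line */
			raw_reg_write(regs, DEIIR, de_iir);
			/* 4: process whatever was latched */
			ilk_display_irq_handler(i915, de_iir);
			ret = IRQ_HANDLED;
		}

		/* 5: re-enable master interrupt control */
		raw_reg_write(regs, DEIER, de_ier);

		return ret;
	}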
2047 void __iomem * const regs = i915->uncore.regs; in ilk_irq_handler()
2055 disable_rpm_wakeref_asserts(&i915->runtime_pm); in ilk_irq_handler()
2077 gen6_gt_irq_handler(&i915->gt, gt_iir); in ilk_irq_handler()
2079 gen5_gt_irq_handler(&i915->gt, gt_iir); in ilk_irq_handler()
2097 gen6_rps_irq_handler(&i915->gt.rps, pm_iir); in ilk_irq_handler()
2107 enable_rpm_wakeref_asserts(&i915->runtime_pm); in ilk_irq_handler()
2122 dev_priv->hotplug.hpd, in bxt_hpd_irq_handler()
2142 dev_priv->hotplug.hpd, in gen11_hpd_irq_handler()
2154 dev_priv->hotplug.hpd, in gen11_hpd_irq_handler()
2161 drm_err(&dev_priv->drm, in gen11_hpd_irq_handler()
2223 iir_reg = TRANS_PSR_IIR(dev_priv->psr.transcoder); in gen8_de_misc_irq_handler()
2237 drm_err(&dev_priv->drm, "Unexpected DE Misc interrupt\n"); in gen8_de_misc_irq_handler()
2254 drm_err(&dev_priv->drm, in gen8_de_irq_handler()
2266 drm_err(&dev_priv->drm, in gen8_de_irq_handler()
2305 drm_err(&dev_priv->drm, in gen8_de_irq_handler()
2309 drm_err(&dev_priv->drm, in gen8_de_irq_handler()
2321 drm_err(&dev_priv->drm, in gen8_de_irq_handler()
2340 drm_err(&dev_priv->drm, in gen8_de_irq_handler()
2351 * on older pch-split platforms. But this needs testing. in gen8_de_irq_handler()
2369 drm_dbg(&dev_priv->drm, in gen8_de_irq_handler()
2398 void __iomem * const regs = dev_priv->uncore.regs; in gen8_irq_handler()
2410 /* Find, queue (onto bottom-halves), then clear each source */ in gen8_irq_handler()
2411 gen8_gt_irq_handler(&dev_priv->gt, master_ctl); in gen8_irq_handler()
2415 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in gen8_irq_handler()
2417 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in gen8_irq_handler()
2428 void __iomem * const regs = gt->uncore->regs; in gen11_gu_misc_irq_ack()
2445 intel_opregion_asle_intr(gt->i915); in gen11_gu_misc_irq_handler()
2469 void __iomem * const regs = i915->uncore.regs; in gen11_display_irq_handler()
2472 disable_rpm_wakeref_asserts(&i915->runtime_pm); in gen11_display_irq_handler()
2482 enable_rpm_wakeref_asserts(&i915->runtime_pm); in gen11_display_irq_handler()
2490 void __iomem * const regs = i915->uncore.regs; in __gen11_irq_handler()
2491 struct intel_gt *gt = &i915->gt; in __gen11_irq_handler()
2504 /* Find, queue (onto bottom-halves), then clear each source */ in __gen11_irq_handler()
2543 * for this interrupt and ack them right away - we keep GEN11_MASTER_IRQ in dg1_master_intr_disable_and_ack()
2572 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in i8xx_enable_vblank()
2573 enum pipe pipe = to_intel_crtc(crtc)->pipe; in i8xx_enable_vblank()
2576 spin_lock_irqsave(&dev_priv->irq_lock, irqflags); in i8xx_enable_vblank()
2578 spin_unlock_irqrestore(&dev_priv->irq_lock, irqflags); in i8xx_enable_vblank()
2585 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in i915gm_enable_vblank()
2589 * Disabling render clock gating during C-states avoids in i915gm_enable_vblank()
2593 if (dev_priv->vblank_enabled++ == 0) in i915gm_enable_vblank()
2601 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in i965_enable_vblank()
2602 enum pipe pipe = to_intel_crtc(crtc)->pipe; in i965_enable_vblank()
2605 spin_lock_irqsave(&dev_priv->irq_lock, irqflags); in i965_enable_vblank()
2608 spin_unlock_irqrestore(&dev_priv->irq_lock, irqflags); in i965_enable_vblank()
2615 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in ilk_enable_vblank()
2616 enum pipe pipe = to_intel_crtc(crtc)->pipe; in ilk_enable_vblank()
2621 spin_lock_irqsave(&dev_priv->irq_lock, irqflags); in ilk_enable_vblank()
2623 spin_unlock_irqrestore(&dev_priv->irq_lock, irqflags); in ilk_enable_vblank()
2636 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in bdw_enable_vblank()
2637 enum pipe pipe = to_intel_crtc(crtc)->pipe; in bdw_enable_vblank()
2640 spin_lock_irqsave(&dev_priv->irq_lock, irqflags); in bdw_enable_vblank()
2642 spin_unlock_irqrestore(&dev_priv->irq_lock, irqflags); in bdw_enable_vblank()
2658 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in i8xx_disable_vblank()
2659 enum pipe pipe = to_intel_crtc(crtc)->pipe; in i8xx_disable_vblank()
2662 spin_lock_irqsave(&dev_priv->irq_lock, irqflags); in i8xx_disable_vblank()
2664 spin_unlock_irqrestore(&dev_priv->irq_lock, irqflags); in i8xx_disable_vblank()
2669 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in i915gm_disable_vblank()
2673 if (--dev_priv->vblank_enabled == 0) in i915gm_disable_vblank()
2679 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in i965_disable_vblank()
2680 enum pipe pipe = to_intel_crtc(crtc)->pipe; in i965_disable_vblank()
2683 spin_lock_irqsave(&dev_priv->irq_lock, irqflags); in i965_disable_vblank()
2686 spin_unlock_irqrestore(&dev_priv->irq_lock, irqflags); in i965_disable_vblank()
2691 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in ilk_disable_vblank()
2692 enum pipe pipe = to_intel_crtc(crtc)->pipe; in ilk_disable_vblank()
2697 spin_lock_irqsave(&dev_priv->irq_lock, irqflags); in ilk_disable_vblank()
2699 spin_unlock_irqrestore(&dev_priv->irq_lock, irqflags); in ilk_disable_vblank()
2704 struct drm_i915_private *dev_priv = to_i915(crtc->dev); in bdw_disable_vblank()
2705 enum pipe pipe = to_intel_crtc(crtc)->pipe; in bdw_disable_vblank()
2708 spin_lock_irqsave(&dev_priv->irq_lock, irqflags); in bdw_disable_vblank()
2710 spin_unlock_irqrestore(&dev_priv->irq_lock, irqflags); in bdw_disable_vblank()
2715 struct intel_uncore *uncore = &dev_priv->uncore; in ibx_irq_reset()
2728 * interrupts. Hence we can't update it after the interrupt handler is enabled -
2739 drm_WARN_ON(&dev_priv->drm, I915_READ(SDEIER) != 0); in ibx_irq_pre_postinstall()
2746 struct intel_uncore *uncore = &dev_priv->uncore; in vlv_display_irq_reset()
2759 dev_priv->irq_mask = ~0u; in vlv_display_irq_reset()
2764 struct intel_uncore *uncore = &dev_priv->uncore; in vlv_display_irq_postinstall()
2786 drm_WARN_ON(&dev_priv->drm, dev_priv->irq_mask != ~0u); in vlv_display_irq_postinstall()
2788 dev_priv->irq_mask = ~enable_mask; in vlv_display_irq_postinstall()
2790 GEN3_IRQ_INIT(uncore, VLV_, dev_priv->irq_mask, enable_mask); in vlv_display_irq_postinstall()
2797 struct intel_uncore *uncore = &dev_priv->uncore; in ilk_irq_reset()
2808 gen5_gt_irq_reset(&dev_priv->gt); in ilk_irq_reset()
2818 gen5_gt_irq_reset(&dev_priv->gt); in valleyview_irq_reset()
2820 spin_lock_irq(&dev_priv->irq_lock); in valleyview_irq_reset()
2821 if (dev_priv->display_irqs_enabled) in valleyview_irq_reset()
2823 spin_unlock_irq(&dev_priv->irq_lock); in valleyview_irq_reset()
2828 struct intel_uncore *uncore = &dev_priv->uncore; in gen8_irq_reset()
2831 gen8_master_intr_disable(dev_priv->uncore.regs); in gen8_irq_reset()
2833 gen8_gt_irq_reset(&dev_priv->gt); in gen8_irq_reset()
2853 struct intel_uncore *uncore = &dev_priv->uncore; in gen11_display_irq_reset()
2901 struct intel_uncore *uncore = &dev_priv->uncore; in gen11_irq_reset()
2904 dg1_master_intr_disable_and_ack(dev_priv->uncore.regs); in gen11_irq_reset()
2906 gen11_master_intr_disable(dev_priv->uncore.regs); in gen11_irq_reset()
2908 gen11_gt_irq_reset(&dev_priv->gt); in gen11_irq_reset()
2918 struct intel_uncore *uncore = &dev_priv->uncore; in gen8_irq_power_well_post_enable()
2923 spin_lock_irq(&dev_priv->irq_lock); in gen8_irq_power_well_post_enable()
2926 spin_unlock_irq(&dev_priv->irq_lock); in gen8_irq_power_well_post_enable()
2932 dev_priv->de_irq_mask[pipe], in gen8_irq_power_well_post_enable()
2933 ~dev_priv->de_irq_mask[pipe] | extra_ier); in gen8_irq_power_well_post_enable()
2935 spin_unlock_irq(&dev_priv->irq_lock); in gen8_irq_power_well_post_enable()
2941 struct intel_uncore *uncore = &dev_priv->uncore; in gen8_irq_power_well_pre_disable()
2944 spin_lock_irq(&dev_priv->irq_lock); in gen8_irq_power_well_pre_disable()
2947 spin_unlock_irq(&dev_priv->irq_lock); in gen8_irq_power_well_pre_disable()
2954 spin_unlock_irq(&dev_priv->irq_lock); in gen8_irq_power_well_pre_disable()
2962 struct intel_uncore *uncore = &dev_priv->uncore; in cherryview_irq_reset()
2967 gen8_gt_irq_reset(&dev_priv->gt); in cherryview_irq_reset()
2971 spin_lock_irq(&dev_priv->irq_lock); in cherryview_irq_reset()
2972 if (dev_priv->display_irqs_enabled) in cherryview_irq_reset()
2974 spin_unlock_irq(&dev_priv->irq_lock); in cherryview_irq_reset()
2983 for_each_intel_encoder(&dev_priv->drm, encoder) in intel_hpd_enabled_irqs()
2984 if (dev_priv->hotplug.stats[encoder->hpd_pin].state == HPD_ENABLED) in intel_hpd_enabled_irqs()
2985 enabled_irqs |= hpd[encoder->hpd_pin]; in intel_hpd_enabled_irqs()
2996 for_each_intel_encoder(&dev_priv->drm, encoder) in intel_hpd_hotplug_irqs()
2997 hotplug_irqs |= hpd[encoder->hpd_pin]; in intel_hpd_hotplug_irqs()
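The two loops above reduce to a pair of small helpers: one collects every pin a registered encoder could assert, the other only the pins whose hotplug detection is currently enabled. A hedged reconstruction of their full shape:

	static u32 intel_hpd_enabled_irqs(struct drm_i915_private *dev_priv,
					  const u32 hpd[HPD_NUM_PINS])
	{
		struct intel_encoder *encoder;
		u32 enabled_irqs = 0;

		for_each_intel_encoder(&dev_priv->drm, encoder)
			if (dev_priv->hotplug.stats[encoder->hpd_pin].state == HPD_ENABLED)
				enabled_irqs |= hpd[encoder->hpd_pin];

		return enabled_irqs;
	}

	static u32 intel_hpd_hotplug_irqs(struct drm_i915_private *dev_priv,
					  const u32 hpd[HPD_NUM_PINS])
	{
		struct intel_encoder *encoder;
		u32 hotplug_irqs = 0;

		for_each_intel_encoder(&dev_priv->drm, encoder)
			hotplug_irqs |= hpd[encoder->hpd_pin];

		return hotplug_irqs;
	}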
3031 enabled_irqs = intel_hpd_enabled_irqs(dev_priv, dev_priv->hotplug.pch_hpd); in ibx_hpd_irq_setup()
3032 hotplug_irqs = intel_hpd_hotplug_irqs(dev_priv, dev_priv->hotplug.pch_hpd); in ibx_hpd_irq_setup()
3064 enabled_irqs = intel_hpd_enabled_irqs(dev_priv, dev_priv->hotplug.pch_hpd); in icp_hpd_irq_setup()
3065 hotplug_irqs = intel_hpd_hotplug_irqs(dev_priv, dev_priv->hotplug.pch_hpd); in icp_hpd_irq_setup()
3089 * the DDI-C pins instead of the TC1 pins. This means we should follow TGP's
3126 enabled_irqs = intel_hpd_enabled_irqs(dev_priv, dev_priv->hotplug.hpd); in gen11_hpd_irq_setup()
3127 hotplug_irqs = intel_hpd_hotplug_irqs(dev_priv, dev_priv->hotplug.hpd); in gen11_hpd_irq_setup()
3177 enabled_irqs = intel_hpd_enabled_irqs(dev_priv, dev_priv->hotplug.pch_hpd); in spt_hpd_irq_setup()
3178 hotplug_irqs = intel_hpd_hotplug_irqs(dev_priv, dev_priv->hotplug.pch_hpd); in spt_hpd_irq_setup()
3205 enabled_irqs = intel_hpd_enabled_irqs(dev_priv, dev_priv->hotplug.hpd); in ilk_hpd_irq_setup()
3206 hotplug_irqs = intel_hpd_hotplug_irqs(dev_priv, dev_priv->hotplug.hpd); in ilk_hpd_irq_setup()
3228 drm_dbg_kms(&dev_priv->drm, in __bxt_hpd_detection_setup()
3259 enabled_irqs = intel_hpd_enabled_irqs(dev_priv, dev_priv->hotplug.hpd); in bxt_hpd_irq_setup()
3260 hotplug_irqs = intel_hpd_hotplug_irqs(dev_priv, dev_priv->hotplug.hpd); in bxt_hpd_irq_setup()
3281 gen3_assert_iir_is_zero(&dev_priv->uncore, SDEIIR); in ibx_irq_postinstall()
3293 struct intel_uncore *uncore = &dev_priv->uncore; in ilk_irq_postinstall()
3316 dev_priv->irq_mask = ~display_mask; in ilk_irq_postinstall()
3320 GEN3_IRQ_INIT(uncore, DE, dev_priv->irq_mask, in ilk_irq_postinstall()
3323 gen5_gt_irq_postinstall(&dev_priv->gt); in ilk_irq_postinstall()
3333 * setup is guaranteed to run in single-threaded context. But we in ilk_irq_postinstall()
3335 spin_lock_irq(&dev_priv->irq_lock); in ilk_irq_postinstall()
3337 spin_unlock_irq(&dev_priv->irq_lock); in ilk_irq_postinstall()
3343 lockdep_assert_held(&dev_priv->irq_lock); in valleyview_enable_display_irqs()
3345 if (dev_priv->display_irqs_enabled) in valleyview_enable_display_irqs()
3348 dev_priv->display_irqs_enabled = true; in valleyview_enable_display_irqs()
3358 lockdep_assert_held(&dev_priv->irq_lock); in valleyview_disable_display_irqs()
3360 if (!dev_priv->display_irqs_enabled) in valleyview_disable_display_irqs()
3363 dev_priv->display_irqs_enabled = false; in valleyview_disable_display_irqs()
3372 gen5_gt_irq_postinstall(&dev_priv->gt); in valleyview_irq_postinstall()
3374 spin_lock_irq(&dev_priv->irq_lock); in valleyview_irq_postinstall()
3375 if (dev_priv->display_irqs_enabled) in valleyview_irq_postinstall()
3377 spin_unlock_irq(&dev_priv->irq_lock); in valleyview_irq_postinstall()
3385 struct intel_uncore *uncore = &dev_priv->uncore; in gen8_de_irq_postinstall()
3429 dev_priv->de_irq_mask[pipe] = ~de_pipe_masked; in gen8_de_irq_postinstall()
3434 dev_priv->de_irq_mask[pipe], in gen8_de_irq_postinstall()
3461 gen8_gt_irq_postinstall(&dev_priv->gt); in gen8_irq_postinstall()
3467 gen8_master_intr_enable(dev_priv->uncore.regs); in gen8_irq_postinstall()
3474 drm_WARN_ON(&dev_priv->drm, I915_READ(SDEIER) != 0); in icp_irq_postinstall()
3478 gen3_assert_iir_is_zero(&dev_priv->uncore, SDEIIR); in icp_irq_postinstall()
3497 struct intel_uncore *uncore = &dev_priv->uncore; in gen11_irq_postinstall()
3503 gen11_gt_irq_postinstall(&dev_priv->gt); in gen11_irq_postinstall()
3511 dg1_master_intr_enable(uncore->regs); in gen11_irq_postinstall()
3514 gen11_master_intr_enable(uncore->regs); in gen11_irq_postinstall()
3521 gen8_gt_irq_postinstall(&dev_priv->gt); in cherryview_irq_postinstall()
3523 spin_lock_irq(&dev_priv->irq_lock); in cherryview_irq_postinstall()
3524 if (dev_priv->display_irqs_enabled) in cherryview_irq_postinstall()
3526 spin_unlock_irq(&dev_priv->irq_lock); in cherryview_irq_postinstall()
3534 struct intel_uncore *uncore = &dev_priv->uncore; in i8xx_irq_reset()
3543 struct intel_uncore *uncore = &dev_priv->uncore; in i8xx_irq_postinstall()
3552 dev_priv->irq_mask = in i8xx_irq_postinstall()
3563 GEN2_IRQ_INIT(uncore, dev_priv->irq_mask, enable_mask); in i8xx_irq_postinstall()
3565 /* Interrupt setup is already guaranteed to be single-threaded, this is in i8xx_irq_postinstall()
3567 spin_lock_irq(&dev_priv->irq_lock); in i8xx_irq_postinstall()
3570 spin_unlock_irq(&dev_priv->irq_lock); in i8xx_irq_postinstall()
3576 struct intel_uncore *uncore = &i915->uncore; in i8xx_error_irq_ack()
3609 drm_dbg(&dev_priv->drm, "EIR stuck: 0x%04x, masked\n", in i8xx_error_irq_handler()
3647 drm_dbg(&dev_priv->drm, "EIR stuck: 0x%08x, masked\n", in i9xx_error_irq_handler()
3660 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in i8xx_irq_handler()
3667 iir = intel_uncore_read16(&dev_priv->uncore, GEN2_IIR); in i8xx_irq_handler()
3680 intel_uncore_write16(&dev_priv->uncore, GEN2_IIR, iir); in i8xx_irq_handler()
3683 intel_engine_signal_breadcrumbs(dev_priv->gt.engine[RCS0]); in i8xx_irq_handler()
3691 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in i8xx_irq_handler()
3698 struct intel_uncore *uncore = &dev_priv->uncore; in i915_irq_reset()
3712 struct intel_uncore *uncore = &dev_priv->uncore; in i915_irq_postinstall()
3719 dev_priv->irq_mask = in i915_irq_postinstall()
3736 dev_priv->irq_mask &= ~I915_DISPLAY_PORT_INTERRUPT; in i915_irq_postinstall()
3739 GEN3_IRQ_INIT(uncore, GEN2_, dev_priv->irq_mask, enable_mask); in i915_irq_postinstall()
3741 /* Interrupt setup is already guaranteed to be single-threaded, this is in i915_irq_postinstall()
3743 spin_lock_irq(&dev_priv->irq_lock); in i915_irq_postinstall()
3746 spin_unlock_irq(&dev_priv->irq_lock); in i915_irq_postinstall()
3760 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in i915_irq_handler()
3788 intel_engine_signal_breadcrumbs(dev_priv->gt.engine[RCS0]); in i915_irq_handler()
3799 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in i915_irq_handler()
3806 struct intel_uncore *uncore = &dev_priv->uncore; in i965_irq_reset()
3818 struct intel_uncore *uncore = &dev_priv->uncore; in i965_irq_postinstall()
3838 dev_priv->irq_mask = in i965_irq_postinstall()
3856 GEN3_IRQ_INIT(uncore, GEN2_, dev_priv->irq_mask, enable_mask); in i965_irq_postinstall()
3858 /* Interrupt setup is already guaranteed to be single-threaded, this is in i965_irq_postinstall()
3860 spin_lock_irq(&dev_priv->irq_lock); in i965_irq_postinstall()
3864 spin_unlock_irq(&dev_priv->irq_lock); in i965_irq_postinstall()
3873 lockdep_assert_held(&dev_priv->irq_lock); in i915_hpd_irq_setup()
3903 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in i965_irq_handler()
3930 intel_engine_signal_breadcrumbs(dev_priv->gt.engine[RCS0]); in i965_irq_handler()
3933 intel_engine_signal_breadcrumbs(dev_priv->gt.engine[VCS0]); in i965_irq_handler()
3944 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm); in i965_irq_handler()
3950 * intel_irq_init - initializes irq support
3958 struct drm_device *dev = &dev_priv->drm; in intel_irq_init()
3965 INIT_WORK(&dev_priv->l3_parity.error_work, ivb_parity_work); in intel_irq_init()
3967 dev_priv->l3_parity.remap_info[i] = NULL; in intel_irq_init()
3969 /* pre-gen11 the guc irqs bits are in the upper 16 bits of the pm reg */ in intel_irq_init()
3971 dev_priv->gt.pm_guc_events = GUC_INTR_GUC2HOST << 16; in intel_irq_init()
3973 dev->vblank_disable_immediate = true; in intel_irq_init()
3975 /* Most platforms treat the display irq block as an always-on in intel_irq_init()
3981 dev_priv->display_irqs_enabled = true; in intel_irq_init()
3983 dev_priv->display_irqs_enabled = false; in intel_irq_init()
3985 dev_priv->hotplug.hpd_storm_threshold = HPD_STORM_DEFAULT_THRESHOLD; in intel_irq_init()
3992 dev_priv->hotplug.hpd_short_storm_enabled = !HAS_DP_MST(dev_priv); in intel_irq_init()
3996 dev_priv->display.hpd_irq_setup = i915_hpd_irq_setup; in intel_irq_init()
3999 dev_priv->display.hpd_irq_setup = jsp_hpd_irq_setup; in intel_irq_init()
4001 dev_priv->display.hpd_irq_setup = mcc_hpd_irq_setup; in intel_irq_init()
4003 dev_priv->display.hpd_irq_setup = gen11_hpd_irq_setup; in intel_irq_init()
4005 dev_priv->display.hpd_irq_setup = bxt_hpd_irq_setup; in intel_irq_init()
4007 dev_priv->display.hpd_irq_setup = spt_hpd_irq_setup; in intel_irq_init()
4009 dev_priv->display.hpd_irq_setup = ilk_hpd_irq_setup; in intel_irq_init()
4014 * intel_irq_fini - deinitializes IRQ support
4024 kfree(i915->l3_parity.remap_info[i]); in intel_irq_fini()
4099 * intel_irq_install - enables the hardware interrupt
4107 * workers. Hence the split into this two-stage approach.
4111 int irq = dev_priv->drm.pdev->irq; in intel_irq_install()
4119 dev_priv->runtime_pm.irqs_enabled = true; in intel_irq_install()
4121 dev_priv->drm.irq_enabled = true; in intel_irq_install()
4128 dev_priv->drm.irq_enabled = false; in intel_irq_install()
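The two-stage approach mentioned above keeps every source masked until after the handler is requested, and only then runs the postinstall hooks to unmask what is wanted. A sketch of the install path under that assumption (intel_irq_handler() standing in for whichever per-platform top half gets selected is an assumption of this sketch):

	int intel_irq_install(struct drm_i915_private *dev_priv)
	{
		int irq = dev_priv->drm.pdev->irq;
		int ret;

		/* stage 1: reset so every source is masked and acked */
		dev_priv->runtime_pm.irqs_enabled = true;
		intel_irq_reset(dev_priv);

		dev_priv->drm.irq_enabled = true;
		ret = request_irq(irq, intel_irq_handler(dev_priv),
				  IRQF_SHARED, DRIVER_NAME, dev_priv);
		if (ret < 0) {
			dev_priv->drm.irq_enabled = false;
			return ret;
		}

		/* stage 2: unmask only what the postinstall hooks ask for */
		intel_irq_postinstall(dev_priv);

		return ret;
	}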
4138 * intel_irq_uninstall - finalizes all irq handling
4146 int irq = dev_priv->drm.pdev->irq; in intel_irq_uninstall()
4154 if (!dev_priv->drm.irq_enabled) in intel_irq_uninstall()
4157 dev_priv->drm.irq_enabled = false; in intel_irq_uninstall()
4164 dev_priv->runtime_pm.irqs_enabled = false; in intel_irq_uninstall()
4168 * intel_runtime_pm_disable_interrupts - runtime interrupt disabling
4177 dev_priv->runtime_pm.irqs_enabled = false; in intel_runtime_pm_disable_interrupts()
4182 * intel_runtime_pm_enable_interrupts - runtime interrupt enabling
4190 dev_priv->runtime_pm.irqs_enabled = true; in intel_runtime_pm_enable_interrupts()
4201 return dev_priv->runtime_pm.irqs_enabled; in intel_irqs_enabled()
4206 synchronize_irq(i915->drm.pdev->irq); in intel_synchronize_irq()