Lines matching refs: ring (radeon fence code, drivers/gpu/drm/radeon/radeon_fence.c)

62 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring)  in radeon_fence_write()  argument
64 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
83 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
85 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
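The two helpers above hide where a ring's sequence number actually lives: a slot in the GPU's shared write-back page when one is available, otherwise a scratch register (see the scratch_reg/cpu_addr setup in radeon_fence_driver_start_ring() further down in this listing). A stand-alone sketch of that split; the struct, field names and wb_enabled flag here are chosen for illustration, not copied from the driver:

#include <stdint.h>
#include <stdio.h>

struct fence_slot_model {
	uint32_t *cpu_addr;     /* slot in the shared write-back page, if any */
	uint32_t scratch_reg;   /* stand-in for the MMIO scratch register */
	int wb_enabled;         /* assumed flag: write-back path usable */
};

static void fence_write_model(struct fence_slot_model *drv, uint32_t seq)
{
	if (drv->wb_enabled && drv->cpu_addr)
		*drv->cpu_addr = seq;     /* memory path */
	else
		drv->scratch_reg = seq;   /* register fallback */
}

static uint32_t fence_read_model(struct fence_slot_model *drv)
{
	if (drv->wb_enabled && drv->cpu_addr)
		return *drv->cpu_addr;
	return drv->scratch_reg;
}

int main(void)
{
	uint32_t slot = 0;
	struct fence_slot_model drv = { .cpu_addr = &slot, .wb_enabled = 1 };

	fence_write_model(&drv, 42);
	printf("seq read back: %u\n", fence_read_model(&drv));
	return 0;
}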
108 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
115 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
131 int ring) in radeon_fence_emit() argument
141 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
142 (*fence)->ring = ring; in radeon_fence_emit()
146 rdev->fence_context + ring, in radeon_fence_emit()
148 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit()
149 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit()
150 radeon_fence_schedule_check(rdev, ring); in radeon_fence_emit()
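radeon_fence_emit() stamps each new fence with the next value of a per-ring, monotonically increasing counter (sync_seq[ring]) and records which ring it belongs to. A minimal user-space model of that bookkeeping, with the struct and array names assumed for the sketch:

#include <stdint.h>
#include <stdio.h>

#define NUM_RINGS 8   /* assumed stand-in for RADEON_NUM_RINGS */

struct fence_model {
	uint64_t seq;
	int ring;
};

static uint64_t sync_seq[NUM_RINGS];   /* last seq emitted, per ring */

static void emit_model(struct fence_model *f, int ring)
{
	f->seq = ++sync_seq[ring];   /* mirrors (*fence)->seq = ++...sync_seq[ring] */
	f->ring = ring;              /* mirrors (*fence)->ring = ring */
}

int main(void)
{
	struct fence_model a, b;

	emit_model(&a, 3);
	emit_model(&b, 3);
	printf("ring 3 emitted seq %llu then %llu\n",
	       (unsigned long long)a.seq, (unsigned long long)b.seq);
	return 0;
}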
172 seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); in radeon_fence_check_signaled()
181 radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring); in radeon_fence_check_signaled()
199 static bool radeon_fence_activity(struct radeon_device *rdev, int ring) in radeon_fence_activity() argument
226 last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq); in radeon_fence_activity()
228 last_emitted = rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_activity()
229 seq = radeon_fence_read(rdev, ring); in radeon_fence_activity()
253 } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq); in radeon_fence_activity()
256 radeon_fence_schedule_check(rdev, ring); in radeon_fence_activity()
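The interesting part of radeon_fence_activity() is that the hardware reports only a 32-bit sequence value, while last_seq and sync_seq are 64-bit. The low word read back has to be widened using the last known 64-bit values, and a wrap of the low word pushes it into the epoch of the last emitted fence. A self-contained sketch of that widening, which approximates rather than reproduces the driver's exact wrap handling:

#include <stdint.h>
#include <stdio.h>

static uint64_t widen_seq(uint32_t hw_seq, uint64_t last_seq, uint64_t last_emitted)
{
	/* start from the upper 32 bits of the last value seen signaling */
	uint64_t seq = (uint64_t)hw_seq | (last_seq & 0xffffffff00000000ULL);

	/* if that went backwards, the low word wrapped: the counter must be
	 * in the epoch of the most recently emitted fence instead */
	if (seq < last_seq)
		seq = (uint64_t)hw_seq | (last_emitted & 0xffffffff00000000ULL);

	return seq;
}

int main(void)
{
	/* fence 0x100000002 was emitted, hardware reports only 0x00000002 */
	printf("%#llx\n",
	       (unsigned long long)widen_seq(0x2, 0xfffffffeULL, 0x100000002ULL));
	return 0;
}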
273 int ring; in radeon_fence_check_lockup() local
278 ring = fence_drv - &rdev->fence_drv[0]; in radeon_fence_check_lockup()
282 radeon_fence_schedule_check(rdev, ring); in radeon_fence_check_lockup()
295 if (radeon_fence_activity(rdev, ring)) in radeon_fence_check_lockup()
298 else if (radeon_ring_is_lockup(rdev, ring, &rdev->ring[ring])) { in radeon_fence_check_lockup()
304 fence_drv->sync_seq[ring], ring); in radeon_fence_check_lockup()
322 void radeon_fence_process(struct radeon_device *rdev, int ring) in radeon_fence_process() argument
324 if (radeon_fence_activity(rdev, ring)) in radeon_fence_process()
343 u64 seq, unsigned ring) in radeon_fence_seq_signaled() argument
345 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_seq_signaled()
349 radeon_fence_process(rdev, ring); in radeon_fence_seq_signaled()
350 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_seq_signaled()
360 unsigned ring = fence->ring; in radeon_fence_is_signaled() local
363 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_is_signaled()
368 radeon_fence_process(rdev, ring); in radeon_fence_is_signaled()
371 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_is_signaled()
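radeon_fence_seq_signaled() and radeon_fence_is_signaled() share one structure: a cheap check against the cached last_seq, one call into radeon_fence_process() to refresh it, then the same check again. A compact model, with the struct below standing in for the per-ring driver state:

#include <stdint.h>
#include <stdbool.h>

struct ring_poll_model {
	uint64_t last_seq;   /* cached: highest seq known to have signaled */
	uint64_t hw_seq;     /* what another radeon_fence_process() pass would find */
};

static bool seq_signaled_model(struct ring_poll_model *r, uint64_t seq)
{
	if (r->last_seq >= seq)        /* fast path, no hardware poll needed */
		return true;
	r->last_seq = r->hw_seq;       /* refresh once (fence_process stand-in) */
	return r->last_seq >= seq;     /* same comparison after the refresh */
}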
391 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) in radeon_fence_enable_signaling()
395 radeon_irq_kms_sw_irq_get(rdev, fence->ring); in radeon_fence_enable_signaling()
397 if (radeon_fence_activity(rdev, fence->ring)) in radeon_fence_enable_signaling()
401 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) { in radeon_fence_enable_signaling()
402 radeon_irq_kms_sw_irq_put(rdev, fence->ring); in radeon_fence_enable_signaling()
410 if (radeon_irq_kms_sw_irq_get_delayed(rdev, fence->ring)) in radeon_fence_enable_signaling()
411 rdev->fence_drv[fence->ring].delayed_irq = true; in radeon_fence_enable_signaling()
412 radeon_fence_schedule_check(rdev, fence->ring); in radeon_fence_enable_signaling()
421 DMA_FENCE_TRACE(&fence->base, "armed on ring %i!\n", fence->ring); in radeon_fence_enable_signaling()
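radeon_fence_enable_signaling() has to take the software-interrupt reference before it can trust a negative re-check, and must drop that reference again if the fence turns out to have signaled in the meantime. A sketch of that get/re-check/put balance, using a plain counter as a stand-in for the kms sw-irq reference count:

#include <stdint.h>
#include <stdbool.h>

struct irq_arm_model {
	uint64_t last_seq;     /* highest seq known signaled */
	uint64_t hw_seq;       /* what one more poll would discover */
	int sw_irq_refcount;   /* stand-in for the kms sw-irq reference count */
};

static bool enable_signaling_model(struct irq_arm_model *r, uint64_t seq)
{
	if (r->last_seq >= seq)
		return false;               /* already signaled, nothing to arm */

	r->sw_irq_refcount++;               /* radeon_irq_kms_sw_irq_get() stand-in */
	r->last_seq = r->hw_seq;            /* one last activity pass before trusting the irq */
	if (r->last_seq >= seq) {
		r->sw_irq_refcount--;       /* raced with completion: drop the reference again */
		return false;
	}
	return true;                        /* fence will be signaled from the interrupt path */
}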
438 if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { in radeon_fence_signaled()
559 seq[fence->ring] = fence->seq; in radeon_fence_wait_timeout()
645 int radeon_fence_wait_next(struct radeon_device *rdev, int ring) in radeon_fence_wait_next() argument
650 seq[ring] = atomic64_read(&rdev->fence_drv[ring].last_seq) + 1ULL; in radeon_fence_wait_next()
651 if (seq[ring] >= rdev->fence_drv[ring].sync_seq[ring]) { in radeon_fence_wait_next()
672 int radeon_fence_wait_empty(struct radeon_device *rdev, int ring) in radeon_fence_wait_empty() argument
677 seq[ring] = rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_wait_empty()
678 if (!seq[ring]) in radeon_fence_wait_empty()
687 ring, r); in radeon_fence_wait_empty()
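radeon_fence_wait_next() and radeon_fence_wait_empty() differ mainly in the target seq they pick: the former aims at last_seq + 1 and bails out rather than block on the very last emitted fence, the latter aims at sync_seq[ring] itself, which is zero when the ring never emitted anything. A simplified model of that target selection (the real functions then hand the seq to a common wait routine):

#include <stdint.h>
#include <stdbool.h>

/* wait_next: wait for the oldest outstanding fence, but refuse to block on
 * the very last fence emitted (there would be nothing behind it to free up). */
static bool wait_next_target(uint64_t last_seq, uint64_t sync_seq, uint64_t *target)
{
	*target = last_seq + 1;
	return *target < sync_seq;
}

/* wait_empty: wait until everything emitted so far has signaled;
 * a sync_seq of zero means the ring never emitted a fence. */
static bool wait_empty_target(uint64_t sync_seq, uint64_t *target)
{
	*target = sync_seq;
	return sync_seq != 0;
}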
733 unsigned radeon_fence_count_emitted(struct radeon_device *rdev, int ring) in radeon_fence_count_emitted() argument
740 radeon_fence_process(rdev, ring); in radeon_fence_count_emitted()
741 emitted = rdev->fence_drv[ring].sync_seq[ring] in radeon_fence_count_emitted()
742 - atomic64_read(&rdev->fence_drv[ring].last_seq); in radeon_fence_count_emitted()
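radeon_fence_count_emitted() is just the difference between what was emitted (sync_seq[ring]) and what has signaled (last_seq). The clamp in the sketch below is a defensive detail of this model, in case the two counters ever disagree:

#include <stdint.h>
#include <stdio.h>

static unsigned count_emitted_model(uint64_t sync_seq, uint64_t last_seq)
{
	uint64_t emitted = sync_seq - last_seq;   /* emitted but not yet signaled */

	/* defensive clamp for this sketch only */
	if (emitted > 0x10000000)
		emitted = 0x10000000;
	return (unsigned)emitted;
}

int main(void)
{
	printf("%u fences outstanding\n", count_emitted_model(107, 100));
	return 0;
}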
769 if (fence->ring == dst_ring) { in radeon_fence_need_sync()
775 if (fence->seq <= fdrv->sync_seq[fence->ring]) { in radeon_fence_need_sync()
800 if (fence->ring == dst_ring) { in radeon_fence_note_sync()
805 src = &fence->rdev->fence_drv[fence->ring]; in radeon_fence_note_sync()
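radeon_fence_need_sync() and radeon_fence_note_sync() carry the cross-ring synchronization bookkeeping: a wait is only needed when the fence lives on another ring and the destination ring has not already synced past its seq, and after the wait the destination inherits the source ring's view of every other ring. A stand-alone model of that sync_seq[] merge (ring count and names assumed):

#include <stdint.h>
#include <stdbool.h>

#define NUM_RINGS 8   /* assumed stand-in for RADEON_NUM_RINGS */

struct ring_sync_model {
	/* highest seq of every other ring this ring has already waited on */
	uint64_t sync_seq[NUM_RINGS];
};

static bool need_sync_model(const struct ring_sync_model *dst,
			    int dst_ring, int fence_ring, uint64_t fence_seq)
{
	if (fence_ring == dst_ring)
		return false;                         /* same ring orders itself */
	return fence_seq > dst->sync_seq[fence_ring]; /* not yet synced that far */
}

static void note_sync_model(struct ring_sync_model *dst,
			    const struct ring_sync_model *src, int dst_ring)
{
	/* after the wait, dst inherits src's view of every other ring */
	for (int i = 0; i < NUM_RINGS; ++i) {
		if (i == dst_ring)
			continue;
		if (src->sync_seq[i] > dst->sync_seq[i])
			dst->sync_seq[i] = src->sync_seq[i];
	}
}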
827 int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring) in radeon_fence_driver_start_ring() argument
832 radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_start_ring()
833 if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) { in radeon_fence_driver_start_ring()
834 rdev->fence_drv[ring].scratch_reg = 0; in radeon_fence_driver_start_ring()
835 if (ring != R600_RING_TYPE_UVD_INDEX) { in radeon_fence_driver_start_ring()
836 index = R600_WB_EVENT_OFFSET + ring * 4; in radeon_fence_driver_start_ring()
837 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
838 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring()
844 rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index; in radeon_fence_driver_start_ring()
845 rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index; in radeon_fence_driver_start_ring()
849 r = radeon_scratch_get(rdev, &rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_start_ring()
855 rdev->fence_drv[ring].scratch_reg - in radeon_fence_driver_start_ring()
857 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
858 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring()
860 radeon_fence_write(rdev, atomic64_read(&rdev->fence_drv[ring].last_seq), ring); in radeon_fence_driver_start_ring()
861 rdev->fence_drv[ring].initialized = true; in radeon_fence_driver_start_ring()
863 ring, rdev->fence_drv[ring].gpu_addr, rdev->fence_drv[ring].cpu_addr); in radeon_fence_driver_start_ring()
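In the write-back case above the fence slot is a byte offset into a shared page, from which both the CPU pointer (into wb.wb[]) and the GPU address (wb.gpu_addr + index) are derived. A sketch of that address math; the offset constant, page size and addresses below are made up for the example:

#include <stdint.h>
#include <stdio.h>

#define WB_EVENT_OFFSET 3072   /* hypothetical byte offset of the event area */

struct wb_model {
	uint32_t wb[4096 / 4];   /* CPU mapping of the write-back page */
	uint64_t gpu_addr;       /* GPU address of that same page */
};

int main(void)
{
	struct wb_model wb = { .gpu_addr = 0x100000 };
	int ring = 2;
	unsigned index = WB_EVENT_OFFSET + ring * 4;   /* 4 bytes per ring */

	uint32_t *cpu_addr = &wb.wb[index / 4];        /* CPU view of the slot */
	uint64_t  gpu_addr = wb.gpu_addr + index;      /* GPU view of the slot */

	printf("ring %d fence slot: cpu %p, gpu %#llx\n",
	       ring, (void *)cpu_addr, (unsigned long long)gpu_addr);
	return 0;
}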
877 static void radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring) in radeon_fence_driver_init_ring() argument
881 rdev->fence_drv[ring].scratch_reg = -1; in radeon_fence_driver_init_ring()
882 rdev->fence_drv[ring].cpu_addr = NULL; in radeon_fence_driver_init_ring()
883 rdev->fence_drv[ring].gpu_addr = 0; in radeon_fence_driver_init_ring()
885 rdev->fence_drv[ring].sync_seq[i] = 0; in radeon_fence_driver_init_ring()
886 atomic64_set(&rdev->fence_drv[ring].last_seq, 0); in radeon_fence_driver_init_ring()
887 rdev->fence_drv[ring].initialized = false; in radeon_fence_driver_init_ring()
888 INIT_DELAYED_WORK(&rdev->fence_drv[ring].lockup_work, in radeon_fence_driver_init_ring()
890 rdev->fence_drv[ring].rdev = rdev; in radeon_fence_driver_init_ring()
907 int ring; in radeon_fence_driver_init() local
910 for (ring = 0; ring < RADEON_NUM_RINGS; ring++) { in radeon_fence_driver_init()
911 radeon_fence_driver_init_ring(rdev, ring); in radeon_fence_driver_init()
929 int ring, r; in radeon_fence_driver_fini() local
932 for (ring = 0; ring < RADEON_NUM_RINGS; ring++) { in radeon_fence_driver_fini()
933 if (!rdev->fence_drv[ring].initialized) in radeon_fence_driver_fini()
935 r = radeon_fence_wait_empty(rdev, ring); in radeon_fence_driver_fini()
938 radeon_fence_driver_force_completion(rdev, ring); in radeon_fence_driver_fini()
940 cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work); in radeon_fence_driver_fini()
942 radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_fini()
943 rdev->fence_drv[ring].initialized = false; in radeon_fence_driver_fini()
957 void radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring) in radeon_fence_driver_force_completion() argument
959 if (rdev->fence_drv[ring].initialized) { in radeon_fence_driver_force_completion()
960 radeon_fence_write(rdev, rdev->fence_drv[ring].sync_seq[ring], ring); in radeon_fence_driver_force_completion()
961 cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work); in radeon_fence_driver_force_completion()
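Forcing completion needs no hardware help: storing the highest seq ever emitted (sync_seq[ring]) into the fence location makes every outstanding fence compare as signaled on the next read. A one-line model (the real path goes through radeon_fence_write()):

#include <stdint.h>

static void force_completion_model(uint32_t *fence_slot, uint64_t sync_seq)
{
	/* store the highest emitted seq; every pending fence now reads as signaled */
	*fence_slot = (uint32_t)sync_seq;
}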
1041 switch (fence->ring) { in radeon_fence_get_timeline_name()