/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/

amdgpu_fence.c
     98  struct amdgpu_fence_driver *drv = &ring->fence_drv;  in amdgpu_fence_write()
    114  struct amdgpu_fence_driver *drv = &ring->fence_drv;  in amdgpu_fence_read()
    146  seq = ++ring->fence_drv.sync_seq;  in amdgpu_fence_emit()
    149  &ring->fence_drv.lock,  in amdgpu_fence_emit()
    152  amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,  in amdgpu_fence_emit()
    155  ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask];  in amdgpu_fence_emit()
    189  seq = ++ring->fence_drv.sync_seq;  in amdgpu_fence_emit_polling()
    190  amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,  in amdgpu_fence_emit_polling()
    207  mod_timer(&ring->fence_drv.fallback_timer,  in amdgpu_fence_schedule_fallback()
    222  struct amdgpu_fence_driver *drv = &ring->fence_drv;  in amdgpu_fence_process()
    [all …]

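A rough idea of the pattern visible in the amdgpu_fence.c hits above: amdgpu_fence_emit() takes the next value of fence_drv.sync_seq, asks the ring to write that value to fence_drv.gpu_addr, and parks the fence in a slot array indexed by seq & num_fences_mask (so the array size is a power of two). The sketch below is a standalone userspace model of that bookkeeping with simplified stand-in types, not the kernel code itself.

/* Minimal userspace model of the emit/slot bookkeeping; names are stand-ins. */
#include <stdint.h>
#include <stdio.h>

#define NUM_FENCES 16                      /* must stay a power of two */

struct fence_driver {
    uint32_t sync_seq;                     /* last sequence number handed out */
    uint32_t num_fences_mask;              /* NUM_FENCES - 1 */
    void    *fences[NUM_FENCES];           /* in-flight fences, one slot per seq */
};

/* Hand out the next sequence number and claim its slot for this fence. */
static uint32_t fence_emit(struct fence_driver *drv, void *fence)
{
    uint32_t seq = ++drv->sync_seq;

    /* The real driver first waits for the slot's previous occupant to
     * signal, then writes seq to gpu_addr via a ring packet. */
    drv->fences[seq & drv->num_fences_mask] = fence;
    return seq;
}

int main(void)
{
    struct fence_driver drv = { .num_fences_mask = NUM_FENCES - 1 };
    int dummy;
    uint32_t seq = fence_emit(&drv, &dummy);

    printf("seq %u stored in slot %u\n", seq, seq & drv.num_fences_mask);
    return 0;
}
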
amdgpu_job.c
     37  job->base.sched->name, atomic_read(&ring->fence_drv.last_seq),  in amdgpu_job_timedout()
     38  ring->fence_drv.sync_seq);  in amdgpu_job_timedout()

uvd_v6_0.c
   1071  uint32_t seq = ring->fence_drv.sync_seq;  in uvd_v6_0_ring_emit_pipeline_sync()
   1072  uint64_t addr = ring->fence_drv.gpu_addr;  in uvd_v6_0_ring_emit_pipeline_sync()
   1100  uint32_t seq = ring->fence_drv.sync_seq;  in uvd_v6_0_enc_ring_emit_pipeline_sync()
   1101  uint64_t addr = ring->fence_drv.gpu_addr;  in uvd_v6_0_enc_ring_emit_pipeline_sync()

amdgpu_ring.h
    176  struct amdgpu_fence_driver fence_drv;  member

si_dma.c
    440  uint32_t seq = ring->fence_drv.sync_seq;  in si_dma_ring_emit_pipeline_sync()
    441  uint64_t addr = ring->fence_drv.gpu_addr;  in si_dma_ring_emit_pipeline_sync()

vce_v3_0.c
    864  uint32_t seq = ring->fence_drv.sync_seq;  in vce_v3_0_emit_pipeline_sync()
    865  uint64_t addr = ring->fence_drv.gpu_addr;  in vce_v3_0_emit_pipeline_sync()

cik_sdma.c
    849  uint32_t seq = ring->fence_drv.sync_seq;  in cik_sdma_ring_emit_pipeline_sync()
    850  uint64_t addr = ring->fence_drv.gpu_addr;  in cik_sdma_ring_emit_pipeline_sync()

sdma_v2_4.c
    829  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v2_4_ring_emit_pipeline_sync()
    830  uint64_t addr = ring->fence_drv.gpu_addr;  in sdma_v2_4_ring_emit_pipeline_sync()

sdma_v3_0.c
   1101  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v3_0_ring_emit_pipeline_sync()
   1102  uint64_t addr = ring->fence_drv.gpu_addr;  in sdma_v3_0_ring_emit_pipeline_sync()

sdma_v4_0.c
   1168  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v4_0_ring_emit_pipeline_sync()
   1169  uint64_t addr = ring->fence_drv.gpu_addr;  in sdma_v4_0_ring_emit_pipeline_sync()

gfx_v6_0.c
   2306  uint32_t seq = ring->fence_drv.sync_seq;  in gfx_v6_0_ring_emit_pipeline_sync()
   2307  uint64_t addr = ring->fence_drv.gpu_addr;  in gfx_v6_0_ring_emit_pipeline_sync()

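All of the *_emit_pipeline_sync() hits above follow the same two-line idiom: snapshot the latest emitted sequence (fence_drv.sync_seq) and the address the fence value lands at (fence_drv.gpu_addr), then emit a wait so the pipeline stalls until the value at that address reaches the sequence. The sketch below models that wait as a CPU-side poll with made-up types; in the real drivers it is a WAIT_REG_MEM-style packet executed by the GPU, not a CPU spin.

/* CPU-side model of the GPU-side "wait until fence memory >= seq" step. */
#include <stdint.h>

struct fence_driver {
    uint32_t sync_seq;                     /* last sequence emitted on this ring */
    volatile uint32_t *fence_cpu_addr;     /* CPU view of the fence location */
};

/* Stall (here: spin) until the fence location catches up with sync_seq. */
static void emit_pipeline_sync(struct fence_driver *drv)
{
    uint32_t seq = drv->sync_seq;          /* same snapshot as the hits above */

    /* Signed difference so sequence wrap-around compares correctly. */
    while ((int32_t)(*drv->fence_cpu_addr - seq) < 0)
        ;                                  /* real code: the GPU waits, not the CPU */
}

int main(void)
{
    uint32_t fence_mem = 5;                /* pretend the GPU already wrote 5 */
    struct fence_driver drv = { .sync_seq = 5, .fence_cpu_addr = &fence_mem };

    emit_pipeline_sync(&drv);              /* returns immediately: 5 >= 5 */
    return 0;
}
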
/Linux-v4.19/drivers/gpu/drm/radeon/

radeon_fence.c
     64  struct radeon_fence_driver *drv = &rdev->fence_drv[ring];  in radeon_fence_write()
     85  struct radeon_fence_driver *drv = &rdev->fence_drv[ring];  in radeon_fence_read()
    115  &rdev->fence_drv[ring].lockup_work,  in radeon_fence_schedule_check()
    141  (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];  in radeon_fence_emit()
    172  seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq);  in radeon_fence_check_signaled()
    226  last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq);  in radeon_fence_activity()
    228  last_emitted = rdev->fence_drv[ring].sync_seq[ring];  in radeon_fence_activity()
    253  } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq);  in radeon_fence_activity()
    271  struct radeon_fence_driver *fence_drv;  in radeon_fence_check_lockup() local
    275  fence_drv = container_of(work, struct radeon_fence_driver,  in radeon_fence_check_lockup()
    [all …]

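The radeon_fence_activity() hits above show how the signalled sequence is published: the value read back from the fence location is stored into last_seq with an atomic exchange, and the loop retries if the exchange reveals that a newer value was already stored, so last_seq only ever moves forward. A userspace rendering of that monotonic update with C11 atomics (standing in for the kernel's atomic64_xchg) might look like this:

/* Monotonic publish of a 64-bit "last signalled" sequence, C11 atomics. */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* Advance *last_seq to at least seq without ever letting it move backwards. */
static void publish_last_seq(_Atomic uint64_t *last_seq, uint64_t seq)
{
    uint64_t old = atomic_exchange(last_seq, seq);

    while (old > seq) {
        /* A racing update already published something newer: put the
         * larger value back and check again. */
        seq = old;
        old = atomic_exchange(last_seq, seq);
    }
}

int main(void)
{
    _Atomic uint64_t last_seq = 10;

    publish_last_seq(&last_seq, 7);        /* stale value, 10 is restored */
    publish_last_seq(&last_seq, 12);       /* newer value wins */
    printf("last_seq = %llu\n", (unsigned long long)atomic_load(&last_seq));
    return 0;
}
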
uvd_v2_2.c
     43  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in uvd_v2_2_fence_emit()

evergreen_dma.c
     45  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in evergreen_dma_fence_ring_emit()

uvd_v1_0.c
     85  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in uvd_v1_0_fence_emit()

r600_dma.c
    291  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in r600_dma_fence_ring_emit()

radeon_vce.c
    739  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in radeon_vce_fence_emit()

cik_sdma.c
    204  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in cik_sdma_fence_ring_emit()

r300.c
    236  radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0));  in r300_fence_ring_emit()

ni.c
   1403  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in cayman_fence_ring_emit()

r600.c
   2871  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in r600_fence_ring_emit()
   2900  …radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET…  in r600_fence_ring_emit()

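Unlike amdgpu, which embeds one fence driver in each ring (the amdgpu_ring.h hit above), the radeon hits index a per-device array: every *_fence_ring_emit() looks up rdev->fence_drv[fence->ring] to find where this ring's sequence number is reported, a GPU address on most ASICs or a scratch register on older parts such as r300. A stand-in sketch of that lookup, with simplified hypothetical types:

/* Per-ring fence driver array lookup; types and sizes are stand-ins. */
#include <stdint.h>

#define NUM_RINGS 8

struct fence_driver {
    uint64_t gpu_addr;      /* where the fence value is written on most ASICs */
    uint32_t scratch_reg;   /* register used instead on older ASICs (r300) */
};

struct device {
    struct fence_driver fence_drv[NUM_RINGS];   /* one entry per ring */
};

struct fence {
    struct device *rdev;
    int ring;               /* ring this fence was emitted on */
    uint64_t seq;
};

/* Resolve the memory location this fence's ring reports completion to. */
static uint64_t fence_gpu_addr(const struct fence *f)
{
    return f->rdev->fence_drv[f->ring].gpu_addr;
}

int main(void)
{
    struct device dev = { .fence_drv[3] = { .gpu_addr = 0x1000 } };
    struct fence f = { .rdev = &dev, .ring = 3, .seq = 1 };

    return fence_gpu_addr(&f) == 0x1000 ? 0 : 1;
}
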
/Linux-v4.19/drivers/gpu/drm/virtio/

virtgpu_fence.c
     74  struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv;  in virtio_gpu_fence_emit()
     98  struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv;  in virtio_gpu_fence_event_process()
    103  atomic64_set(&vgdev->fence_drv.last_seq, last_seq);  in virtio_gpu_fence_event_process()

virtgpu_debugfs.c
     38  (u64)atomic64_read(&vgdev->fence_drv.last_seq),  in virtio_gpu_debugfs_irq_info()
     39  vgdev->fence_drv.sync_seq);  in virtio_gpu_debugfs_irq_info()

virtgpu_kms.c
    162  vgdev->fence_drv.context = dma_fence_context_alloc(1);  in virtio_gpu_driver_load()
    163  spin_lock_init(&vgdev->fence_drv.lock);  in virtio_gpu_driver_load()
    164  INIT_LIST_HEAD(&vgdev->fence_drv.fences);  in virtio_gpu_driver_load()

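The virtgpu_kms.c hits above are the one-time setup at driver load: the fence driver gets a dma-fence context id, a spinlock, and an empty list that holds in-flight fences until the host signals them, while the sync_seq/last_seq pair read back in virtgpu_debugfs.c tracks how far the host has caught up. A userspace approximation of that initialisation, with a pthread mutex and a plain pointer standing in for the kernel spinlock and list_head:

/* Userspace stand-in for the fence driver state set up at driver load. */
#include <pthread.h>
#include <stdatomic.h>
#include <stdint.h>
#include <stddef.h>

struct fence_node {
    struct fence_node *next;
    uint64_t seq;
};

struct fence_driver {
    uint64_t context;                      /* fence context id for this device */
    uint64_t sync_seq;                     /* last sequence submitted to the host */
    atomic_uint_fast64_t last_seq;         /* last sequence the host signalled */
    pthread_mutex_t lock;                  /* protects the fences list */
    struct fence_node *fences;             /* in-flight fences, oldest first */
};

/* Global counter playing the role of dma_fence_context_alloc(1). */
static atomic_uint_fast64_t context_counter = 1;

static void fence_driver_init(struct fence_driver *drv)
{
    drv->context = atomic_fetch_add(&context_counter, 1);
    drv->sync_seq = 0;
    atomic_init(&drv->last_seq, 0);
    pthread_mutex_init(&drv->lock, NULL);
    drv->fences = NULL;                    /* empty in-flight list, like INIT_LIST_HEAD */
}

int main(void)
{
    struct fence_driver drv;

    fence_driver_init(&drv);
    return drv.fences == NULL ? 0 : 1;
}
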
virtgpu_drv.h
    191  struct virtio_gpu_fence_driver fence_drv;  member