
Searched refs:gpu_addr (Results 1 – 25 of 122) sorted by relevance


/Linux-v4.19/drivers/gpu/drm/radeon/
r600_dma.c
144 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF); in r600_dma_resume()
146 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC)); in r600_dma_resume()
151 WREG32(DMA_RB_BASE, ring->gpu_addr >> 8); in r600_dma_resume()
237 u64 gpu_addr; in r600_dma_ring_test() local
244 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ring_test()
255 radeon_ring_write(ring, lower_32_bits(gpu_addr)); in r600_dma_ring_test()
256 radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff); in r600_dma_ring_test()
291 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_dma_fence_ring_emit()
318 u64 addr = semaphore->gpu_addr; in r600_dma_semaphore_ring_emit()
344 u64 gpu_addr; in r600_dma_ib_test() local
[all …]
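
Nearly every hit above follows the same convention: the 64-bit gpu_addr is handed to 32-bit hardware by splitting it with lower_32_bits()/upper_32_bits() (the DMA rings keep only 8 extra address bits), while ring-buffer base registers such as DMA_RB_BASE take the address shifted right by 8, i.e. a 256-byte-aligned base. A minimal standalone sketch of that arithmetic, assuming only the C standard library (lo32/hi32 and the example address are illustrative stand-ins, not the kernel's helpers):

/*
 * Standalone illustration only -- not kernel code. lo32()/hi32() stand in
 * for the kernel's lower_32_bits()/upper_32_bits() helpers, and the
 * address is a made-up example.
 */
#include <stdint.h>
#include <stdio.h>

static unsigned lo32(uint64_t v) { return (unsigned)(v & 0xffffffffu); }
static unsigned hi32(uint64_t v) { return (unsigned)(v >> 32); }

int main(void)
{
	uint64_t gpu_addr = 0x0000004512345678ull;

	/* two-dword form used when the address is written into a ring */
	printf("lo dword: 0x%08x\n", lo32(gpu_addr));
	printf("hi dword: 0x%08x (DMA rings keep only 8 bits of this)\n",
	       hi32(gpu_addr) & 0xff);

	/* 256-byte-aligned base form: DMA_RB_BASE gets gpu_addr >> 8 */
	printf("rb base:  0x%08x\n", (unsigned)(gpu_addr >> 8));
	return 0;
}
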
uvd_v4_2.c
47 addr = (rdev->uvd.gpu_addr + 0x200) >> 3; in uvd_v4_2_resume()
49 addr = rdev->uvd.gpu_addr >> 3; in uvd_v4_2_resume()
67 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v4_2_resume()
71 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v4_2_resume()
uvd_v2_2.c
43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit()
77 uint64_t addr = semaphore->gpu_addr; in uvd_v2_2_semaphore_emit()
113 addr = rdev->uvd.gpu_addr >> 3; in uvd_v2_2_resume()
130 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v2_2_resume()
134 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v2_2_resume()
cik_sdma.c
155 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
156 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
204 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_sdma_fence_ring_emit()
233 u64 addr = semaphore->gpu_addr; in cik_sdma_semaphore_ring_emit()
401 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
403 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
408 WREG32(SDMA0_GFX_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cik_sdma_gfx_resume()
409 WREG32(SDMA0_GFX_RB_BASE_HI + reg_offset, ring->gpu_addr >> 40); in cik_sdma_gfx_resume()
652 u64 gpu_addr; in cik_sdma_ring_test() local
659 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ring_test()
[all …]
uvd_v1_0.c
85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit()
121 addr = (rdev->uvd.gpu_addr >> 3) + 16; in uvd_v1_0_resume()
138 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v1_0_resume()
142 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v1_0_resume()
364 WREG32(UVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) | in uvd_v1_0_start()
374 WREG32(UVD_RBC_RB_BASE, ring->gpu_addr); in uvd_v1_0_start()
487 radeon_ring_write(ring, ib->gpu_addr); in uvd_v1_0_ib_execute()
radeon_semaphore.c
51 (*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo); in radeon_semaphore_create()
69 ring->last_semaphore_signal_addr = semaphore->gpu_addr; in radeon_semaphore_emit_signal()
86 ring->last_semaphore_wait_addr = semaphore->gpu_addr; in radeon_semaphore_emit_wait()
vce_v1_0.c
218 uint64_t addr = rdev->vce.gpu_addr; in vce_v1_0_resume()
300 WREG32(VCE_RB_BASE_LO, ring->gpu_addr); in vce_v1_0_start()
301 WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
307 WREG32(VCE_RB_BASE_LO2, ring->gpu_addr); in vce_v1_0_start()
308 WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
radeon_trace.h
177 __field(uint64_t, gpu_addr)
183 __entry->gpu_addr = sem->gpu_addr;
187 __entry->waiters, __entry->gpu_addr)
evergreen_dma.c
45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit()
89 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in evergreen_dma_ring_ib_execute()
90 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
radeon_object.h
135 extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr);
137 u64 max_offset, u64 *gpu_addr);
166 return sa_bo->manager->gpu_addr + sa_bo->soffset; in radeon_sa_bo_gpu_addr()
ni_dma.c
145 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
146 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
223 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); in cayman_dma_resume()
225 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cayman_dma_resume()
230 WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cayman_dma_resume()
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/
vce_v4_0.c
156 uint64_t addr = table->gpu_addr; in vce_v4_0_mmsch_start()
234 lower_32_bits(ring->gpu_addr)); in vce_v4_0_sriov_start()
236 upper_32_bits(ring->gpu_addr)); in vce_v4_0_sriov_start()
257 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
260 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
264 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
267 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
270 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
273 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
339 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), ring->gpu_addr); in vce_v4_0_start()
[all …]
si_dma.c
73 amdgpu_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in si_dma_ring_emit_ib()
74 amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in si_dma_ring_emit_ib()
154 rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4); in si_dma_start()
161 WREG32(DMA_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in si_dma_start()
209 u64 gpu_addr; in si_dma_ring_test_ring() local
217 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ring()
229 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in si_dma_ring_test_ring()
230 amdgpu_ring_write(ring, upper_32_bits(gpu_addr) & 0xff); in si_dma_ring_test_ring()
268 u64 gpu_addr; in si_dma_ring_test_ib() local
277 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ib()
[all …]
vcn_v1_0.c
289 lower_32_bits(adev->vcn.gpu_addr)); in vcn_v1_0_mc_resume()
291 upper_32_bits(adev->vcn.gpu_addr)); in vcn_v1_0_mc_resume()
300 lower_32_bits(adev->vcn.gpu_addr + offset)); in vcn_v1_0_mc_resume()
302 upper_32_bits(adev->vcn.gpu_addr + offset)); in vcn_v1_0_mc_resume()
307 lower_32_bits(adev->vcn.gpu_addr + offset + AMDGPU_VCN_HEAP_SIZE)); in vcn_v1_0_mc_resume()
309 upper_32_bits(adev->vcn.gpu_addr + offset + AMDGPU_VCN_HEAP_SIZE)); in vcn_v1_0_mc_resume()
746 (upper_32_bits(ring->gpu_addr) >> 2)); in vcn_v1_0_start()
750 lower_32_bits(ring->gpu_addr)); in vcn_v1_0_start()
752 upper_32_bits(ring->gpu_addr)); in vcn_v1_0_start()
767 WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO, ring->gpu_addr); in vcn_v1_0_start()
[all …]
uvd_v7_0.c
237 dummy = ib->gpu_addr + 1024; in uvd_v7_0_enc_get_create_msg()
298 dummy = ib->gpu_addr + 1024; in uvd_v7_0_enc_get_destroy_msg()
673 lower_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_mc_resume()
675 upper_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_mc_resume()
684 lower_32_bits(adev->uvd.inst[i].gpu_addr + offset)); in uvd_v7_0_mc_resume()
686 upper_32_bits(adev->uvd.inst[i].gpu_addr + offset)); in uvd_v7_0_mc_resume()
691 lower_32_bits(adev->uvd.inst[i].gpu_addr + offset + AMDGPU_UVD_HEAP_SIZE)); in uvd_v7_0_mc_resume()
693 upper_32_bits(adev->uvd.inst[i].gpu_addr + offset + AMDGPU_UVD_HEAP_SIZE)); in uvd_v7_0_mc_resume()
713 uint64_t addr = table->gpu_addr; in uvd_v7_0_mmsch_start()
812 lower_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_sriov_start()
[all …]
amdgpu_fence.c
152 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit()
190 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit_polling()
381 ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4); in amdgpu_fence_driver_start_ring()
386 ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index; in amdgpu_fence_driver_start_ring()
397 ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr); in amdgpu_fence_driver_start_ring()
422 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
vce_v3_0.c
284 WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr); in vce_v3_0_start()
285 WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
291 WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr); in vce_v3_0_start()
292 WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
298 WREG32(mmVCE_RB_BASE_LO3, ring->gpu_addr); in vce_v3_0_start()
299 WREG32(mmVCE_RB_BASE_HI3, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
547 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR0, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
548 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR1, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
549 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR2, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
551 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
[all …]
cik_sdma.c
230 amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_emit_ib()
231 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff); in cik_sdma_ring_emit_ib()
474 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
476 ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
480 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in cik_sdma_gfx_resume()
481 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in cik_sdma_gfx_resume()
618 u64 gpu_addr; in cik_sdma_ring_test_ring() local
626 gpu_addr = adev->wb.gpu_addr + (index * 4); in cik_sdma_ring_test_ring()
637 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in cik_sdma_ring_test_ring()
638 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in cik_sdma_ring_test_ring()
[all …]
sdma_v2_4.c
257 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v2_4_ring_emit_ib()
258 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v2_4_ring_emit_ib()
450 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v2_4_gfx_resume()
452 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v2_4_gfx_resume()
456 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in sdma_v2_4_gfx_resume()
457 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in sdma_v2_4_gfx_resume()
592 u64 gpu_addr; in sdma_v2_4_ring_test_ring() local
600 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ring()
613 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v2_4_ring_test_ring()
614 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in sdma_v2_4_ring_test_ring()
[all …]
amdgpu_virt.c
329 if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr) in amdgpu_virt_alloc_mm_table()
335 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
344 adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
356 if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr) in amdgpu_virt_free_mm_table()
360 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_free_mm_table()
362 adev->virt.mm_table.gpu_addr = 0; in amdgpu_virt_free_mm_table()
uvd_v6_0.c
229 dummy = ib->gpu_addr + 1024; in uvd_v6_0_enc_get_create_msg()
291 dummy = ib->gpu_addr + 1024; in uvd_v6_0_enc_get_destroy_msg()
598 lower_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
600 upper_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
836 WREG32(mmUVD_RBC_RB_RPTR_ADDR, (upper_32_bits(ring->gpu_addr) >> 2)); in uvd_v6_0_start()
840 lower_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
842 upper_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
856 WREG32(mmUVD_RB_BASE_LO, ring->gpu_addr); in uvd_v6_0_start()
857 WREG32(mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
863 WREG32(mmUVD_RB_BASE_LO2, ring->gpu_addr); in uvd_v6_0_start()
[all …]
sdma_v3_0.c
432 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
433 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v3_0_ring_emit_ib()
690 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
692 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
696 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in sdma_v3_0_gfx_resume()
697 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in sdma_v3_0_gfx_resume()
711 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v3_0_gfx_resume()
865 u64 gpu_addr; in sdma_v3_0_ring_test_ring() local
873 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ring()
886 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v3_0_ring_test_ring()
[all …]
amdgpu_object.h
247 u64 *gpu_addr, void **cpu_addr);
251 u64 *gpu_addr, void **cpu_addr);
252 void amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr,
304 return sa_bo->manager->gpu_addr + sa_bo->soffset; in amdgpu_sa_bo_gpu_addr()
/Linux-v4.19/drivers/gpu/drm/mgag200/
mgag200_cursor.c
53 u64 gpu_addr; in mga_crtc_cursor_set() local
218 gpu_addr = mdev->cursor.pixels_1_gpu_addr; in mga_crtc_cursor_set()
220 gpu_addr = mdev->cursor.pixels_2_gpu_addr; in mga_crtc_cursor_set()
221 WREG_DAC(MGA1064_CURSOR_BASE_ADR_LOW, (u8)((gpu_addr>>10) & 0xff)); in mga_crtc_cursor_set()
222 WREG_DAC(MGA1064_CURSOR_BASE_ADR_HI, (u8)((gpu_addr>>18) & 0x3f)); in mga_crtc_cursor_set()
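
The two WREG_DAC lines above split the pinned cursor buffer's gpu_addr across two 8-bit DAC registers: bits [17:10] go into CURSOR_BASE_ADR_LOW and bits [23:18] into CURSOR_BASE_ADR_HI, which only works because the cursor base is at least 1 KiB aligned. A small standalone sketch of that bit slicing (the example address is made up, but the shifts and masks match the hits above):

/*
 * Standalone illustration only -- not the mgag200 driver code.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t gpu_addr = 0x0000000000234c00ull;		/* 1 KiB aligned cursor BO */
	unsigned lo = (unsigned)((gpu_addr >> 10) & 0xff);	/* CURSOR_BASE_ADR_LOW */
	unsigned hi = (unsigned)((gpu_addr >> 18) & 0x3f);	/* CURSOR_BASE_ADR_HI  */

	printf("LOW = 0x%02x, HI = 0x%02x\n", lo, hi);
	return 0;
}
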
/Linux-v4.19/drivers/gpu/drm/qxl/
qxl_object.c
224 static int __qxl_bo_pin(struct qxl_bo *bo, u32 domain, u64 *gpu_addr) in __qxl_bo_pin() argument
232 if (gpu_addr) in __qxl_bo_pin()
233 *gpu_addr = qxl_bo_gpu_offset(bo); in __qxl_bo_pin()
240 if (gpu_addr != NULL) in __qxl_bo_pin()
241 *gpu_addr = qxl_bo_gpu_offset(bo); in __qxl_bo_pin()
275 int qxl_bo_pin(struct qxl_bo *bo, u32 domain, u64 *gpu_addr) in qxl_bo_pin() argument
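
qxl_bo_pin() here, like the radeon_object.h and amdgpu_object.h prototypes earlier in the list, uses an optional out-parameter: the caller passes a u64 * and, when it is non-NULL and pinning succeeds, it receives the buffer's GPU address. A hedged standalone sketch of that calling convention (fake_bo and fake_bo_pin are hypothetical stand-ins, not the drivers' real types):

/*
 * Standalone illustration only -- not qxl or radeon code.
 */
#include <stdint.h>
#include <stdio.h>

struct fake_bo {
	uint64_t gpu_offset;	/* where the buffer sits in the GPU address space */
	int pin_count;
};

static int fake_bo_pin(struct fake_bo *bo, uint64_t *gpu_addr)
{
	bo->pin_count++;
	if (gpu_addr)			/* the out-parameter is optional */
		*gpu_addr = bo->gpu_offset;
	return 0;
}

int main(void)
{
	struct fake_bo bo = { .gpu_offset = 0x100000, .pin_count = 0 };
	uint64_t addr;

	if (fake_bo_pin(&bo, &addr) == 0)
		printf("pinned at gpu_addr 0x%llx\n", (unsigned long long)addr);
	return 0;
}
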
