/Linux-v4.19/drivers/gpu/drm/radeon/
ni_dma.c
    320  unsigned ndw;    in cayman_dma_vm_copy_pages() local
    323  ndw = count * 2;    in cayman_dma_vm_copy_pages()
    324  if (ndw > 0xFFFFE)    in cayman_dma_vm_copy_pages()
    325  ndw = 0xFFFFE;    in cayman_dma_vm_copy_pages()
    328  0, 0, ndw);    in cayman_dma_vm_copy_pages()
    334  pe += ndw * 4;    in cayman_dma_vm_copy_pages()
    335  src += ndw * 4;    in cayman_dma_vm_copy_pages()
    336  count -= ndw / 2;    in cayman_dma_vm_copy_pages()
    360  unsigned ndw;    in cayman_dma_vm_write_pages() local
    363  ndw = count * 2;    in cayman_dma_vm_write_pages()
    [all …]
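The cayman_dma_vm_copy_pages() hits above show the usual radeon chunking pattern: ndw counts ring dwords (two per PTE), is clamped to the packet count-field limit of 0xFFFFE, and the source and destination pointers advance by ndw * 4 bytes while count drops by ndw / 2 PTEs per packet. A minimal standalone sketch of just that arithmetic; emit_copy_packet() is a made-up stand-in, not a driver function:

    /* Sketch, not driver code: split a PTE copy into DMA packets whose
     * dword count stays within 0xFFFFE, mirroring the clamp-and-advance
     * arithmetic of cayman_dma_vm_copy_pages(). */
    #include <stdio.h>

    static void emit_copy_packet(unsigned long long pe, unsigned long long src,
                                 unsigned ndw)
    {
        printf("copy %u dwords: src 0x%llx -> pe 0x%llx\n", ndw, src, pe);
    }

    int main(void)
    {
        unsigned long long pe = 0x100000, src = 0x800000;
        unsigned count = 0x100000;          /* PTEs to copy, two dwords each */

        while (count) {
            unsigned ndw = count * 2;       /* dwords for this packet */

            if (ndw > 0xFFFFE)              /* packet count-field limit */
                ndw = 0xFFFFE;

            emit_copy_packet(pe, src, ndw);

            pe += (unsigned long long)ndw * 4;    /* bytes consumed */
            src += (unsigned long long)ndw * 4;
            count -= ndw / 2;               /* PTEs covered by this packet */
        }
        return 0;
    }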
si_dma.c
    112  unsigned ndw;    in si_dma_vm_write_pages() local
    115  ndw = count * 2;    in si_dma_vm_write_pages()
    116  if (ndw > 0xFFFFE)    in si_dma_vm_write_pages()
    117  ndw = 0xFFFFE;    in si_dma_vm_write_pages()
    120  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);    in si_dma_vm_write_pages()
    123  for (; ndw > 0; ndw -= 2, --count, pe += 8) {    in si_dma_vm_write_pages()
    159  unsigned ndw;    in si_dma_vm_set_pages() local
    162  ndw = count * 2;    in si_dma_vm_set_pages()
    163  if (ndw > 0xFFFFE)    in si_dma_vm_set_pages()
    164  ndw = 0xFFFFE;    in si_dma_vm_set_pages()
    [all …]
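si_dma_vm_write_pages() applies the same clamp, then emits a header dword built by DMA_PACKET() and walks the remaining dwords two at a time, one 64-bit PTE per pair. A hedged sketch of that layout, using a plain array instead of the real indirect-buffer structure and a simplified PTE encoding:

    /* Sketch of the WRITE-packet layout implied above: one header dword
     * carrying ndw, then each 64-bit PTE as a low dword and a high dword.
     * 'ib' is a plain array and the PTE encoding is simplified. */
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t ib[32];
        unsigned length_dw = 0;
        uint64_t addr = 0x1000, incr = 0x1000, flags = 0x1;
        unsigned count = 4;
        unsigned ndw = count * 2;           /* two dwords per PTE */

        if (ndw > 0xFFFFE)                  /* packet count-field limit */
            ndw = 0xFFFFE;

        ib[length_dw++] = ndw;              /* header dword, opcode omitted */
        for (; ndw > 0; ndw -= 2) {
            uint64_t pte = addr | flags;    /* simplified PTE value */

            ib[length_dw++] = (uint32_t)pte;          /* low 32 bits */
            ib[length_dw++] = (uint32_t)(pte >> 32);  /* high 32 bits */
            addr += incr;
        }

        for (unsigned i = 0; i < length_dw; i++)
            printf("ib[%u] = 0x%08x\n", i, (unsigned)ib[i]);
        return 0;
    }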
radeon_ring.c
    104  int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)    in radeon_ring_alloc() argument
    109  if (ndw > (ring->ring_size / 4))    in radeon_ring_alloc()
    114  ndw = (ndw + ring->align_mask) & ~ring->align_mask;    in radeon_ring_alloc()
    115  while (ndw > (ring->ring_free_dw - 1)) {    in radeon_ring_alloc()
    117  if (ndw < ring->ring_free_dw) {    in radeon_ring_alloc()
    124  ring->count_dw = ndw;    in radeon_ring_alloc()
    140  int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)    in radeon_ring_lock() argument
    145  r = radeon_ring_alloc(rdev, ring, ndw);    in radeon_ring_lock()
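radeon_ring_alloc() is where ndw requests are validated: anything larger than the whole ring is rejected, the request is rounded up with align_mask, and the caller waits until enough dwords are free before count_dw is set. A toy version of the arithmetic; toy_ring is not struct radeon_ring, and where the sketch fails the real function keeps waiting:

    /* Toy version of the ndw handling in radeon_ring_alloc(): reject
     * requests bigger than the ring, round up to the ring alignment,
     * and only set count_dw once the request fits in the free space. */
    #include <stdio.h>

    struct toy_ring {
        unsigned ring_size;     /* bytes */
        unsigned align_mask;    /* alignment in dwords, minus one */
        unsigned ring_free_dw;
        unsigned count_dw;
    };

    static int toy_ring_alloc(struct toy_ring *ring, unsigned ndw)
    {
        if (ndw > ring->ring_size / 4)
            return -1;                      /* can never fit */

        ndw = (ndw + ring->align_mask) & ~ring->align_mask;
        if (ndw > ring->ring_free_dw - 1)
            return -1;                      /* the driver waits here instead */

        ring->count_dw = ndw;
        return 0;
    }

    int main(void)
    {
        struct toy_ring ring = { .ring_size = 4096, .align_mask = 15,
                                 .ring_free_dw = 1024 };

        printf("alloc 7 dw -> %d, count_dw = %u\n",
               toy_ring_alloc(&ring, 7), ring.count_dw);
        return 0;
    }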
cik_sdma.c
    848  unsigned ndw;    in cik_sdma_vm_write_pages() local
    851  ndw = count * 2;    in cik_sdma_vm_write_pages()
    852  if (ndw > 0xFFFFE)    in cik_sdma_vm_write_pages()
    853  ndw = 0xFFFFE;    in cik_sdma_vm_write_pages()
    860  ib->ptr[ib->length_dw++] = ndw;    in cik_sdma_vm_write_pages()
    861  for (; ndw > 0; ndw -= 2, --count, pe += 8) {    in cik_sdma_vm_write_pages()
    897  unsigned ndw;    in cik_sdma_vm_set_pages() local
    900  ndw = count;    in cik_sdma_vm_set_pages()
    901  if (ndw > 0x7FFFF)    in cik_sdma_vm_set_pages()
    902  ndw = 0x7FFFF;    in cik_sdma_vm_set_pages()
    [all …]
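The CIK fragments make the unit difference visible: the write path clamps a dword count (two per PTE, limit 0xFFFFE), while the set-pages path clamps the PTE count itself at 0x7FFFF. A small sketch contrasting the two limits, with illustrative helper names only:

    /* Illustrative only: the two clamp limits seen in the CIK fragments.
     * A write packet is limited by its payload dword count, a set-pages
     * packet by the number of PTEs it generates. */
    #include <stdio.h>

    static unsigned chunk_write_dw(unsigned count)  /* dwords per write packet */
    {
        unsigned ndw = count * 2;

        return ndw > 0xFFFFE ? 0xFFFFE : ndw;
    }

    static unsigned chunk_set_ptes(unsigned count)  /* PTEs per set packet */
    {
        return count > 0x7FFFF ? 0x7FFFF : count;
    }

    int main(void)
    {
        unsigned count = 0x90000;           /* PTEs to update */

        printf("write packet: %u dwords (%u PTEs)\n",
               chunk_write_dw(count), chunk_write_dw(count) / 2);
        printf("set packet:   %u PTEs\n", chunk_set_ptes(count));
        return 0;
    }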
radeon_vm.c
    648  unsigned count = 0, pt_idx, ndw;    in radeon_vm_update_page_directory() local
    653  ndw = 64;    in radeon_vm_update_page_directory()
    656  ndw += vm->max_pde_used * 6;    in radeon_vm_update_page_directory()
    659  if (ndw > 0xfffff)    in radeon_vm_update_page_directory()
    662  r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4);    in radeon_vm_update_page_directory()
    706  WARN_ON(ib.length_dw > ndw);    in radeon_vm_update_page_directory()
    918  unsigned nptes, ncmds, ndw;    in radeon_vm_bo_update() local
    974  ndw = 64;    in radeon_vm_bo_update()
    979  ndw += ncmds * 7;    in radeon_vm_bo_update()
    983  ndw += ncmds * 4;    in radeon_vm_bo_update()
    [all …]
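radeon_vm.c sizes its indirect buffers pessimistically: a fixed 64-dword overhead, a per-entry worst case, a 0xfffff-dword sanity limit, an allocation of ndw * 4 bytes, and a later WARN_ON() if the commands actually written exceeded the estimate. A sketch of that budgeting; the bail-out on oversize and the "actual usage" figure are invented for the example, since the fragment only shows the comparisons:

    /* Sketch of the worst-case IB sizing in radeon_vm_update_page_directory():
     * fixed overhead plus a per-PDE estimate, a limit check, an allocation of
     * ndw * 4 bytes, and a final consistency check against what was emitted. */
    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        unsigned max_pde_used = 2000;
        unsigned ndw = 64;                  /* fixed header/padding budget */

        ndw += max_pde_used * 6;            /* worst case per PDE update */
        if (ndw > 0xfffff) {                /* larger than one IB allows */
            fprintf(stderr, "estimate too large for a single IB\n");
            return 1;
        }

        printf("allocate IB of %u dwords (%u bytes)\n", ndw, ndw * 4);

        unsigned length_dw = 64 + max_pde_used * 4;  /* pretend actual usage */
        assert(length_dw <= ndw);   /* mirrors WARN_ON(ib.length_dw > ndw) */
        return 0;
    }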
r100.c
    896  unsigned ndw;    in r100_copy_blit() local
    908  ndw = 64 + (10 * num_loops);    in r100_copy_blit()
    909  r = radeon_ring_lock(rdev, ring, ndw);    in r100_copy_blit()
    911  DRM_ERROR("radeon: moving bo (%d) asking for %u dw.\n", r, ndw);    in r100_copy_blit()
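r100_copy_blit() applies the same idea on the GFX ring: the copy is split into loops and the ring is locked for 64 dwords plus 10 per loop. A sketch of the sizing, where the per-loop page limit is an assumption rather than something visible in the fragment:

    /* Sketch of the blit sizing in r100_copy_blit(): the copy is split into
     * loops and the ring request is 64 dwords plus 10 per loop.  The number
     * of pages one loop moves is assumed for this example. */
    #include <stdio.h>

    int main(void)
    {
        unsigned num_gpu_pages = 10000;
        unsigned pages_per_loop = 8191;     /* assumed per-loop limit */
        unsigned num_loops = (num_gpu_pages + pages_per_loop - 1) / pages_per_loop;
        unsigned ndw = 64 + (10 * num_loops);

        printf("asking the ring for %u dw (%u loops)\n", ndw, num_loops);
        return 0;
    }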
radeon.h
    1022  int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
    1023  int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/
si_dma.c
    361  unsigned ndw = count * 2;    in si_dma_vm_write_pte() local
    363  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);    in si_dma_vm_write_pte()
    366  for (; ndw > 0; ndw -= 2) {    in si_dma_vm_write_pte()
    391  unsigned ndw;    in si_dma_vm_set_pte_pde() local
    394  ndw = count * 2;    in si_dma_vm_set_pte_pde()
    395  if (ndw > 0xFFFFE)    in si_dma_vm_set_pte_pde()
    396  ndw = 0xFFFFE;    in si_dma_vm_set_pte_pde()
    404  ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);    in si_dma_vm_set_pte_pde()
    413  pe += ndw * 4;    in si_dma_vm_set_pte_pde()
    414  addr += (ndw / 2) * incr;    in si_dma_vm_set_pte_pde()
    [all …]
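The amdgpu SI path also shows the address stride: each set_pte_pde packet covers ndw / 2 PTEs, so pe advances by ndw * 4 bytes and the mapped address by (ndw / 2) * incr. A sketch of just that advance logic, with the packet encoding left out:

    /* Sketch of the advance logic in si_dma_vm_set_pte_pde(): each packet
     * covers ndw / 2 PTEs, so the write pointer moves ndw * 4 bytes and the
     * mapped address moves (ndw / 2) * incr.  Packet encoding is omitted. */
    #include <stdio.h>

    int main(void)
    {
        unsigned long long pe = 0x200000, addr = 0x80000000ULL;
        unsigned long long incr = 0x1000;   /* bytes mapped per PTE */
        unsigned count = 0x90000;           /* PTEs to set */

        while (count) {
            unsigned ndw = count * 2;

            if (ndw > 0xFFFFE)
                ndw = 0xFFFFE;

            printf("packet: %u PTEs, pe 0x%llx, addr 0x%llx\n",
                   ndw / 2, pe, addr);

            pe += (unsigned long long)ndw * 4;
            addr += (ndw / 2) * incr;
            count -= ndw / 2;
        }
        return 0;
    }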
amdgpu_ring.c
    64  int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw)    in amdgpu_ring_alloc() argument
    68  ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask;    in amdgpu_ring_alloc()
    73  if (WARN_ON_ONCE(ndw > ring->max_dw))    in amdgpu_ring_alloc()
    76  ring->count_dw = ndw;    in amdgpu_ring_alloc()
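amdgpu_ring_alloc() keeps the align_mask round-up but, in the lines shown, checks the aligned request against max_dw under WARN_ON_ONCE() instead of waiting for space. A toy version; the types are invented and the error return is an assumption based on that check:

    /* Toy version of the amdgpu variant: round the request up with the
     * ring's align_mask and refuse it if the aligned size exceeds max_dw.
     * The error return stands in for the WARN_ON_ONCE() path. */
    #include <stdio.h>

    struct toy_ring {
        unsigned align_mask;
        unsigned max_dw;
        unsigned count_dw;
    };

    static int toy_amdgpu_ring_alloc(struct toy_ring *ring, unsigned ndw)
    {
        ndw = (ndw + ring->align_mask) & ~ring->align_mask;
        if (ndw > ring->max_dw)
            return -1;

        ring->count_dw = ndw;
        return 0;
    }

    int main(void)
    {
        struct toy_ring ring = { .align_mask = 0xff, .max_dw = 1024 };

        printf("alloc 100 dw -> %d, count_dw = %u\n",
               toy_amdgpu_ring_alloc(&ring, 100), ring.count_dw);
        return 0;
    }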
amdgpu_vm.c
    1046  unsigned ndw = 0;    in amdgpu_vm_update_directories() local
    1071  ndw = 512 * 8;    in amdgpu_vm_update_directories()
    1072  r = amdgpu_job_alloc_with_ib(adev, ndw * 4, &job);    in amdgpu_vm_update_directories()
    1103  (ndw - params.ib->length_dw) < 32)    in amdgpu_vm_update_directories()
    1124  WARN_ON(params.ib->length_dw > ndw);    in amdgpu_vm_update_directories()
    1376  unsigned nptes, ncmds, ndw;    in amdgpu_vm_bo_update_mapping() local
    1424  ndw = 64;    in amdgpu_vm_bo_update_mapping()
    1428  ndw += ncmds * adev->vm_manager.vm_pte_funcs->copy_pte_num_dw;    in amdgpu_vm_bo_update_mapping()
    1431  ndw += nptes * 2;    in amdgpu_vm_bo_update_mapping()
    1437  ndw += ncmds * 10;    in amdgpu_vm_bo_update_mapping()
    [all …]
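amdgpu_vm_bo_update_mapping() budgets ndw along two branches: a copy-based path charged copy_pte_num_dw per command plus two dwords per PTE staged in the IB, or a direct path charged ten dwords per set-pages command. A sketch of the two estimates; the branch condition and the numbers fed in are stand-ins:

    /* Sketch of the two ndw estimates in amdgpu_vm_bo_update_mapping().
     * use_copy and the example numbers are stand-ins; copy_pte_num_dw is
     * taken as a parameter rather than read from the PTE function table. */
    #include <stdio.h>

    static unsigned estimate_ndw(unsigned ncmds, unsigned nptes,
                                 int use_copy, unsigned copy_pte_num_dw)
    {
        unsigned ndw = 64;                  /* fixed overhead */

        if (use_copy) {
            ndw += ncmds * copy_pte_num_dw; /* one copy command per chunk */
            ndw += nptes * 2;               /* PTE values staged in the IB */
        } else {
            ndw += ncmds * 10;              /* direct set-pages commands */
        }
        return ndw;
    }

    int main(void)
    {
        printf("copy path:  %u dw\n", estimate_ndw(4, 1000, 1, 7));
        printf("write path: %u dw\n", estimate_ndw(4, 1000, 0, 0));
        return 0;
    }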
cik_sdma.c
    774  unsigned ndw = count * 2;    in cik_sdma_vm_write_pte() local
    780  ib->ptr[ib->length_dw++] = ndw;    in cik_sdma_vm_write_pte()
    781  for (; ndw > 0; ndw -= 2) {    in cik_sdma_vm_write_pte()
sdma_v2_4.c
    754  unsigned ndw = count * 2;    in sdma_v2_4_vm_write_pte() local
    760  ib->ptr[ib->length_dw++] = ndw;    in sdma_v2_4_vm_write_pte()
    761  for (; ndw > 0; ndw -= 2) {    in sdma_v2_4_vm_write_pte()
sdma_v3_0.c
    1026  unsigned ndw = count * 2;    in sdma_v3_0_vm_write_pte() local
    1032  ib->ptr[ib->length_dw++] = ndw;    in sdma_v3_0_vm_write_pte()
    1033  for (; ndw > 0; ndw -= 2) {    in sdma_v3_0_vm_write_pte()
amdgpu_ring.h
    224  int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw);
sdma_v4_0.c
    1091  unsigned ndw = count * 2;    in sdma_v4_0_vm_write_pte() local
    1097  ib->ptr[ib->length_dw++] = ndw - 1;    in sdma_v4_0_vm_write_pte()
    1098  for (; ndw > 0; ndw -= 2) {    in sdma_v4_0_vm_write_pte()
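Across the SDMA write_pte helpers listed above, the visible difference is the header: cik_sdma, sdma_v2_4 and sdma_v3_0 store ndw directly, while sdma_v4_0 stores ndw - 1, a count-minus-one field. A trivial sketch of the contrast; the surrounding packet words are not reproduced:

    /* Trivial sketch of the header difference between the write_pte helpers:
     * older SDMA blocks store the payload dword count as-is, sdma_v4_0
     * stores it minus one.  These helpers are illustrative, not packet
     * builders. */
    #include <stdint.h>
    #include <stdio.h>

    static uint32_t header_count(unsigned ndw)           { return ndw; }
    static uint32_t header_count_minus_one(unsigned ndw) { return ndw - 1; }

    int main(void)
    {
        unsigned count = 8;
        unsigned ndw = count * 2;           /* two dwords per 64-bit PTE */

        printf("cik_sdma / sdma_v2_4 / sdma_v3_0 header: %u\n",
               (unsigned)header_count(ndw));
        printf("sdma_v4_0 header:                        %u\n",
               (unsigned)header_count_minus_one(ndw));
        return 0;
    }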
/Linux-v4.19/kernel/rcu/
tree_plugin.h
    2360  int ndw;    in do_nocb_deferred_wakeup_common() local
    2367  ndw = READ_ONCE(rdp->nocb_defer_wakeup);    in do_nocb_deferred_wakeup_common()
    2369  __wake_nocb_leader(rdp, ndw == RCU_NOCB_WAKE_FORCE, flags);    in do_nocb_deferred_wakeup_common()
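In the RCU hit, ndw is not a dword count at all: it latches the deferred-wakeup state from rdp->nocb_defer_wakeup, and comparing it against RCU_NOCB_WAKE_FORCE decides whether the no-CBs leader wakeup is forced. A toy illustration with stand-in constants; the real values live in the RCU headers, not here:

    /* Toy illustration only: ndw here holds a deferred-wakeup level, and the
     * comparison picks a forced wakeup.  WAKE_* are stand-ins for the
     * RCU_NOCB_WAKE_* constants, and a plain load stands in for READ_ONCE(). */
    #include <stdio.h>

    enum { WAKE_NOT, WAKE_NORMAL, WAKE_FORCE };

    static void wake_leader(int force)
    {
        printf("wake no-CBs leader%s\n", force ? " (forced)" : "");
    }

    int main(void)
    {
        int nocb_defer_wakeup = WAKE_FORCE; /* would be set by earlier code */
        int ndw = nocb_defer_wakeup;        /* READ_ONCE() in the kernel */

        if (ndw != WAKE_NOT)
            wake_leader(ndw == WAKE_FORCE);
        return 0;
    }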