Searched refs:wb_offset (Results 1 – 14 of 14) sorted by relevance
191  u32 reg_offset, wb_offset;  in cayman_dma_resume() local
198  wb_offset = R600_WB_DMA_RPTR_OFFSET;  in cayman_dma_resume()
202  wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET;  in cayman_dma_resume()
222  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);  in cayman_dma_resume()
224  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cayman_dma_resume()
369  u32 reg_offset, wb_offset;  in cik_sdma_gfx_resume() local
376  wb_offset = R600_WB_DMA_RPTR_OFFSET;  in cik_sdma_gfx_resume()
380  wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET;  in cik_sdma_gfx_resume()
400  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in cik_sdma_gfx_resume()
402  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cik_sdma_gfx_resume()
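In the two radeon hits above, wb_offset is a fixed per-ring slot in the driver's write-back buffer (R600_WB_DMA_RPTR_OFFSET for the first DMA ring, CAYMAN_WB_DMA1_RPTR_OFFSET for the second), and the GPU address of that slot is split into the HI/LO register halves. A minimal userspace sketch of that arithmetic follows; the constants and address value are illustrative stand-ins, not copied from the radeon headers.

#include <stdint.h>
#include <stdio.h>

#define WB_DMA_RPTR_OFFSET  1792U  /* stand-in for R600_WB_DMA_RPTR_OFFSET    */
#define WB_DMA1_RPTR_OFFSET 2304U  /* stand-in for CAYMAN_WB_DMA1_RPTR_OFFSET */

int main(void)
{
    uint64_t wb_gpu_addr = 0x0000008000400000ULL;  /* example rdev->wb.gpu_addr */
    int ring = 1;                                  /* DMA ring index, 0 or 1    */
    uint32_t wb_offset = ring ? WB_DMA1_RPTR_OFFSET : WB_DMA_RPTR_OFFSET;

    /* The Cayman hit keeps only 8 high bits (& 0xFF); the LO half is forced
     * to dword alignment with 0xFFFFFFFC, as in both hits above. */
    uint32_t hi = (uint32_t)((wb_gpu_addr + wb_offset) >> 32) & 0xFF;
    uint32_t lo = (uint32_t)(wb_gpu_addr + wb_offset) & 0xFFFFFFFC;

    printf("RPTR_ADDR_HI = 0x%02x, RPTR_ADDR_LO = 0x%08x\n", hi, lo);
    return 0;
}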
47   unsigned int wb_offset, /* Offset & ~PAGE_MASK */  member
208  return (((loff_t)req->wb_index) << PAGE_SHIFT) + req->wb_offset;  in req_offset()
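The req_offset() hit above combines the two fields of an NFS page request: wb_index is the page index within the file and wb_offset is the byte offset inside that page, so the absolute file offset is the index shifted to bytes plus wb_offset. A small sketch of that arithmetic, with illustrative values and a 4 KiB page size assumed:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12  /* 4 KiB pages assumed for the example */

int main(void)
{
    uint64_t wb_index  = 3;     /* req->wb_index: page index in the file  */
    uint32_t wb_offset = 0x200; /* req->wb_offset: offset within the page */

    uint64_t file_off = (wb_index << PAGE_SHIFT) + wb_offset;
    printf("file offset = %llu\n", (unsigned long long)file_off); /* 12800 */
    return 0;
}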
513  off = head->wb_offset;  in nfs_join_page_group()
529  head->wb_offset = off;  in nfs_join_page_group()
1125 rqend = req->wb_offset + req->wb_bytes;  in nfs_try_to_update_request()
1132 if (offset > rqend || end < req->wb_offset)  in nfs_try_to_update_request()
1136 if (offset < req->wb_offset) {  in nfs_try_to_update_request()
1137 req->wb_offset = offset;  in nfs_try_to_update_request()
1141 req->wb_bytes = end - req->wb_offset;  in nfs_try_to_update_request()
1143 req->wb_bytes = rqend - req->wb_offset;  in nfs_try_to_update_request()
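The nfs_try_to_update_request() hits above show an incoming write [offset, offset + bytes) being folded into an existing request that covers [wb_offset, wb_offset + wb_bytes), provided the two ranges touch. A minimal userspace sketch of that merge follows; names mirror the hits, offsets are within a single page as the struct comment above indicates, and this is not the kernel function itself.

#include <stdio.h>

struct req_sketch {
    unsigned int wb_offset; /* start of the request within the page */
    unsigned int wb_bytes;  /* length of the request                */
};

static int try_update(struct req_sketch *req, unsigned int offset, unsigned int bytes)
{
    unsigned int end   = offset + bytes;
    unsigned int rqend = req->wb_offset + req->wb_bytes;

    if (offset > rqend || end < req->wb_offset)
        return 0;                               /* disjoint: cannot merge */
    if (offset < req->wb_offset)
        req->wb_offset = offset;                /* extend the front       */
    if (end > rqend)
        req->wb_bytes = end - req->wb_offset;   /* extend the back        */
    else
        req->wb_bytes = rqend - req->wb_offset; /* keep the original end  */
    return 1;
}

int main(void)
{
    struct req_sketch req = { .wb_offset = 512, .wb_bytes = 256 };
    try_update(&req, 700, 200);                 /* overlaps the tail      */
    printf("merged range: [%u, %u)\n", req.wb_offset, req.wb_offset + req.wb_bytes);
    return 0;                                   /* prints [512, 900)      */
}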
461  req->wb_offset = offset;  in __nfs_create_request()
1185 req->wb_offset += size;  in __nfs_pageio_add_request()
1209 req->wb_offset, size);  in __nfs_pageio_add_request()
1288 offset = req->wb_offset;  in nfs_pageio_add_request()
388  req->wb_offset = pos & ~PAGE_MASK;  in nfs_direct_read_schedule_iovec()
840  req->wb_offset = pos & ~PAGE_MASK;  in nfs_direct_write_schedule_iovec()
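In the two nfs_direct hits above, the request's wb_offset is derived from the file position: since the kernel's PAGE_MASK is ~(PAGE_SIZE - 1), pos & ~PAGE_MASK is simply the byte offset of pos within its page. A tiny sketch, assuming a 4 KiB page size:

#include <stdio.h>

#define PAGE_SIZE 4096UL
#define PAGE_MASK (~(PAGE_SIZE - 1))

int main(void)
{
    unsigned long pos = 12800;                  /* example file position       */
    unsigned long wb_offset = pos & ~PAGE_MASK; /* == pos % PAGE_SIZE, i.e. 512 */
    printf("wb_offset = %lu\n", wb_offset);
    return 0;
}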
1182 __entry->offset = req->wb_offset;
417  u32 wb_offset;  in sdma_v2_4_gfx_resume() local
422  wb_offset = (ring->rptr_offs * 4);  in sdma_v2_4_gfx_resume()
458  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v2_4_gfx_resume()
460  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v2_4_gfx_resume()
439  u32 wb_offset;  in cik_sdma_gfx_resume() local
444  wb_offset = (ring->rptr_offs * 4);  in cik_sdma_gfx_resume()
480  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in cik_sdma_gfx_resume()
482  ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cik_sdma_gfx_resume()
1175 u32 wb_offset;  in sdma_v4_0_gfx_resume() local
1180 wb_offset = (ring->rptr_offs * 4);  in sdma_v4_0_gfx_resume()
1194 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v4_0_gfx_resume()
1196 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v4_0_gfx_resume()
1265 u32 wb_offset;  in sdma_v4_0_page_resume() local
1270 wb_offset = (ring->rptr_offs * 4);  in sdma_v4_0_page_resume()
1284 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v4_0_page_resume()
1286 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v4_0_page_resume()
652  u32 wb_offset;  in sdma_v3_0_gfx_resume() local
660  wb_offset = (ring->rptr_offs * 4);  in sdma_v3_0_gfx_resume()
697  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v3_0_gfx_resume()
699  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v3_0_gfx_resume()
601  u32 wb_offset;  in sdma_v5_2_gfx_resume() local
611  wb_offset = (ring->rptr_offs * 4);  in sdma_v5_2_gfx_resume()
648  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v5_2_gfx_resume()
650  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v5_2_gfx_resume()
719  u32 wb_offset;  in sdma_v5_0_gfx_resume() local
729  wb_offset = (ring->rptr_offs * 4);  in sdma_v5_0_gfx_resume()
767  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v5_0_gfx_resume()
769  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v5_0_gfx_resume()
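All the amdgpu sdma_v*_gfx_resume()/page_resume() hits above share one pattern: the write-back buffer is an array of 32-bit slots, so the slot index ring->rptr_offs is scaled by 4 to a byte offset, and the resulting 64-bit GPU address is split into the HI/LO halves written to the ring's read-pointer address registers. A minimal userspace sketch of that computation follows; the address, slot index, and local helpers are illustrative, not taken from amdgpu.

#include <stdint.h>
#include <stdio.h>

static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }
static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }

int main(void)
{
    uint64_t wb_gpu_addr = 0x0000008000200000ULL; /* example adev->wb.gpu_addr      */
    uint32_t rptr_offs   = 16;                    /* example ring->rptr_offs        */
    uint32_t wb_offset   = rptr_offs * 4;         /* byte offset into the WB buffer */

    uint32_t hi = upper_32_bits(wb_gpu_addr + wb_offset) & 0xFFFFFFFF;
    uint32_t lo = lower_32_bits(wb_gpu_addr + wb_offset) & 0xFFFFFFFC;

    printf("RPTR_ADDR_HI = 0x%08x, RPTR_ADDR_LO = 0x%08x\n", hi, lo);
    return 0;
}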
829 if (!IS_ALIGNED(req->wb_offset, alignment)) in is_aligned_req()
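The is_aligned_req() hit above rejects a request whose wb_offset is not a multiple of the required alignment. In the kernel, IS_ALIGNED(x, a) tests ((x) & ((a) - 1)) == 0 for a power-of-two alignment; a local equivalent is sketched here with illustrative values.

#include <stdio.h>

#define IS_ALIGNED_SKETCH(x, a) (((x) & ((a) - 1)) == 0)

int main(void)
{
    unsigned int wb_offset = 1536;  /* example request offset  */
    unsigned int alignment = 512;   /* example block alignment */

    if (!IS_ALIGNED_SKETCH(wb_offset, alignment))
        printf("offset %u is not %u-byte aligned\n", wb_offset, alignment);
    else
        printf("offset %u is %u-byte aligned\n", wb_offset, alignment);
    return 0;
}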