Searched refs:length_dw (Results 1 – 25 of 74) sorted by relevance

/Linux-v6.1/drivers/gpu/drm/radeon/
si_dma.c
78 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pages()
80 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pages()
119 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
120 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
121 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
132 ib->ptr[ib->length_dw++] = value; in si_dma_vm_write_pages()
133 ib->ptr[ib->length_dw++] = upper_32_bits(value); in si_dma_vm_write_pages()
[all …]
radeon_vce.c
362 ib.length_dw = 0; in radeon_vce_get_create_msg()
363 ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */ in radeon_vce_get_create_msg()
364 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */ in radeon_vce_get_create_msg()
365 ib.ptr[ib.length_dw++] = cpu_to_le32(handle); in radeon_vce_get_create_msg()
367 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000030); /* len */ in radeon_vce_get_create_msg()
368 ib.ptr[ib.length_dw++] = cpu_to_le32(0x01000001); /* create cmd */ in radeon_vce_get_create_msg()
369 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000); in radeon_vce_get_create_msg()
370 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000042); in radeon_vce_get_create_msg()
371 ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000a); in radeon_vce_get_create_msg()
372 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); in radeon_vce_get_create_msg()
[all …]
ni_dma.c
145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
326 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in cayman_dma_vm_copy_pages()
328 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
330 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
331 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in cayman_dma_vm_copy_pages()
367 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, in cayman_dma_vm_write_pages()
369 ib->ptr[ib->length_dw++] = pe; in cayman_dma_vm_write_pages()
370 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_write_pages()
381 ib->ptr[ib->length_dw++] = value; in cayman_dma_vm_write_pages()
[all …]
radeon_cs.c
95 p->nrelocs = chunk->length_dw / 4; in radeon_cs_parser_relocs()
316 p->chunks[i].length_dw = user_chunk.length_dw; in radeon_cs_parser_init()
323 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
329 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
335 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
339 size = p->chunks[i].length_dw; in radeon_cs_parser_init()
360 if (p->chunks[i].length_dw > 1) in radeon_cs_parser_init()
362 if (p->chunks[i].length_dw > 2) in radeon_cs_parser_init()
559 if (parser->const_ib.length_dw) { in radeon_cs_ib_vm_chunk()
625 if (ib_chunk->length_dw > RADEON_IB_VM_MAX_SIZE) { in radeon_cs_ib_fill()
[all …]
cik_sdma.c
156 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
731 ib.length_dw = 5; in cik_sdma_ib_test()
812 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY, in cik_sdma_vm_copy_pages()
814 ib->ptr[ib->length_dw++] = bytes; in cik_sdma_vm_copy_pages()
815 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in cik_sdma_vm_copy_pages()
816 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cik_sdma_vm_copy_pages()
817 ib->ptr[ib->length_dw++] = upper_32_bits(src); in cik_sdma_vm_copy_pages()
818 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pages()
819 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_copy_pages()
855 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE, in cik_sdma_vm_write_pages()
[all …]
radeon_vm.c
412 ib.length_dw = 0; in radeon_vm_clear_bo()
416 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
664 ib.length_dw = 0; in radeon_vm_update_page_directory()
701 if (ib.length_dw != 0) { in radeon_vm_update_page_directory()
705 WARN_ON(ib.length_dw > ndw); in radeon_vm_update_page_directory()
1002 ib.length_dw = 0; in radeon_vm_bo_update()
1020 WARN_ON(ib.length_dw > ndw); in radeon_vm_bo_update()
radeon_uvd.c
575 if (idx >= relocs_chunk->length_dw) { in radeon_uvd_cs_reloc()
577 idx, relocs_chunk->length_dw); in radeon_uvd_cs_reloc()
690 if (p->chunk_ib->length_dw % 16) { in radeon_uvd_cs_parse()
692 p->chunk_ib->length_dw); in radeon_uvd_cs_parse()
720 } while (p->idx < p->chunk_ib->length_dw); in radeon_uvd_cs_parse()
751 ib.length_dw = 16; in radeon_uvd_send_msg()
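
The radeon hits above all follow one convention: length_dw is the running count of 32-bit words already written into an indirect buffer (IB), so every packet dword is appended with a post-increment and the final value doubles as the IB size handed to the ring. A minimal stand-alone sketch of that idiom, assuming a simplified stand-in struct (fake_ib, ib_emit and ib_example are illustrative names, not the kernel's types):

#include <stdint.h>

struct fake_ib {
	uint32_t *ptr;       /* CPU view of the indirect buffer */
	uint32_t length_dw;  /* dwords written so far; also the final IB size */
};

/* Mirrors the drivers' ib->ptr[ib->length_dw++] = value; pattern. */
static inline void ib_emit(struct fake_ib *ib, uint32_t value)
{
	ib->ptr[ib->length_dw++] = value;
}

/* Loosely modelled on the si_dma_vm_write_pages() hits above: reset the
 * count, then append the low and (masked) high halves of a PTE address. */
static void ib_example(struct fake_ib *ib, uint64_t pe)
{
	ib->length_dw = 0;
	ib_emit(ib, (uint32_t)pe);                /* lower_32_bits(pe) */
	ib_emit(ib, (uint32_t)(pe >> 32) & 0xff); /* upper_32_bits(pe) & 0xff */
}
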
/Linux-v6.1/drivers/gpu/drm/amd/amdgpu/
amdgpu_vce.c
471 ib->length_dw = 0; in amdgpu_vce_get_create_msg()
472 ib->ptr[ib->length_dw++] = 0x0000000c; /* len */ in amdgpu_vce_get_create_msg()
473 ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */ in amdgpu_vce_get_create_msg()
474 ib->ptr[ib->length_dw++] = handle; in amdgpu_vce_get_create_msg()
477 ib->ptr[ib->length_dw++] = 0x00000040; /* len */ in amdgpu_vce_get_create_msg()
479 ib->ptr[ib->length_dw++] = 0x00000030; /* len */ in amdgpu_vce_get_create_msg()
480 ib->ptr[ib->length_dw++] = 0x01000001; /* create cmd */ in amdgpu_vce_get_create_msg()
481 ib->ptr[ib->length_dw++] = 0x00000000; in amdgpu_vce_get_create_msg()
482 ib->ptr[ib->length_dw++] = 0x00000042; in amdgpu_vce_get_create_msg()
483 ib->ptr[ib->length_dw++] = 0x0000000a; in amdgpu_vce_get_create_msg()
[all …]
si_dma.c
75 amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in si_dma_ring_emit_ib()
279 ib.length_dw = 4; in si_dma_ring_test_ib()
321 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pte()
323 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pte()
324 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pte()
325 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pte()
326 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pte()
346 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pte()
347 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_write_pte()
348 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in si_dma_vm_write_pte()
[all …]
sdma_v2_4.c
267 amdgpu_ring_write(ring, ib->length_dw); in sdma_v2_4_ring_emit_ib()
628 ib.length_dw = 8; in sdma_v2_4_ring_test_ib()
671 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v2_4_vm_copy_pte()
673 ib->ptr[ib->length_dw++] = bytes; in sdma_v2_4_vm_copy_pte()
674 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v2_4_vm_copy_pte()
675 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v2_4_vm_copy_pte()
676 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v2_4_vm_copy_pte()
677 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v2_4_vm_copy_pte()
678 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v2_4_vm_copy_pte()
698 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v2_4_vm_write_pte()
[all …]
cik_sdma.c
238 amdgpu_ring_write(ring, ib->length_dw); in cik_sdma_ring_emit_ib()
690 ib.length_dw = 5; in cik_sdma_ring_test_ib()
732 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY, in cik_sdma_vm_copy_pte()
734 ib->ptr[ib->length_dw++] = bytes; in cik_sdma_vm_copy_pte()
735 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in cik_sdma_vm_copy_pte()
736 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cik_sdma_vm_copy_pte()
737 ib->ptr[ib->length_dw++] = upper_32_bits(src); in cik_sdma_vm_copy_pte()
738 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pte()
739 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_copy_pte()
759 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE, in cik_sdma_vm_write_pte()
[all …]
amdgpu_vcn.c
629 ib->length_dw = 16; in amdgpu_vcn_dec_send_msg()
749 ib->ptr[ib->length_dw++] = 0x00000010; /* single queue checksum */ in amdgpu_vcn_unified_ring_ib_header()
750 ib->ptr[ib->length_dw++] = 0x30000002; in amdgpu_vcn_unified_ring_ib_header()
751 ib_checksum = &ib->ptr[ib->length_dw++]; in amdgpu_vcn_unified_ring_ib_header()
752 ib->ptr[ib->length_dw++] = ib_pack_in_dw; in amdgpu_vcn_unified_ring_ib_header()
754 ib->ptr[ib->length_dw++] = 0x00000010; /* engine info */ in amdgpu_vcn_unified_ring_ib_header()
755 ib->ptr[ib->length_dw++] = 0x30000001; in amdgpu_vcn_unified_ring_ib_header()
756 ib->ptr[ib->length_dw++] = enc ? 0x2 : 0x3; in amdgpu_vcn_unified_ring_ib_header()
757 ib->ptr[ib->length_dw++] = ib_pack_in_dw * sizeof(uint32_t); in amdgpu_vcn_unified_ring_ib_header()
799 ib->length_dw = 0; in amdgpu_vcn_dec_sw_send_msg()
[all …]
sdma_v3_0.c
441 amdgpu_ring_write(ring, ib->length_dw); in sdma_v3_0_ring_emit_ib()
900 ib.length_dw = 8; in sdma_v3_0_ring_test_ib()
942 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v3_0_vm_copy_pte()
944 ib->ptr[ib->length_dw++] = bytes; in sdma_v3_0_vm_copy_pte()
945 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_vm_copy_pte()
946 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v3_0_vm_copy_pte()
947 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v3_0_vm_copy_pte()
948 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v3_0_vm_copy_pte()
949 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v3_0_vm_copy_pte()
969 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v3_0_vm_write_pte()
[all …]
sdma_v6_0.c
290 amdgpu_ring_write(ring, ib->length_dw); in sdma_v6_0_ring_emit_ib()
1015 ib.length_dw = 8; in sdma_v6_0_ring_test_ib()
1067 ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COPY) | in sdma_v6_0_vm_copy_pte()
1069 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v6_0_vm_copy_pte()
1070 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v6_0_vm_copy_pte()
1071 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v6_0_vm_copy_pte()
1072 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v6_0_vm_copy_pte()
1073 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v6_0_vm_copy_pte()
1074 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v6_0_vm_copy_pte()
1096 ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) | in sdma_v6_0_vm_write_pte()
[all …]
sdma_v5_2.c
305 amdgpu_ring_write(ring, ib->length_dw); in sdma_v5_2_ring_emit_ib()
1014 ib.length_dw = 8; in sdma_v5_2_ring_test_ib()
1066 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v5_2_vm_copy_pte()
1068 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v5_2_vm_copy_pte()
1069 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v5_2_vm_copy_pte()
1070 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v5_2_vm_copy_pte()
1071 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v5_2_vm_copy_pte()
1072 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v5_2_vm_copy_pte()
1073 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v5_2_vm_copy_pte()
1094 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v5_2_vm_write_pte()
[all …]
uvd_v6_0.c
227 ib->length_dw = 0; in uvd_v6_0_enc_get_create_msg()
228 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v6_0_enc_get_create_msg()
229 ib->ptr[ib->length_dw++] = 0x00000001; /* session info */ in uvd_v6_0_enc_get_create_msg()
230 ib->ptr[ib->length_dw++] = handle; in uvd_v6_0_enc_get_create_msg()
231 ib->ptr[ib->length_dw++] = 0x00010000; in uvd_v6_0_enc_get_create_msg()
232 ib->ptr[ib->length_dw++] = upper_32_bits(addr); in uvd_v6_0_enc_get_create_msg()
233 ib->ptr[ib->length_dw++] = addr; in uvd_v6_0_enc_get_create_msg()
235 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v6_0_enc_get_create_msg()
236 ib->ptr[ib->length_dw++] = 0x00000002; /* task info */ in uvd_v6_0_enc_get_create_msg()
237 ib->ptr[ib->length_dw++] = 0x0000001c; in uvd_v6_0_enc_get_create_msg()
[all …]
sdma_v5_0.c
473 amdgpu_ring_write(ring, ib->length_dw); in sdma_v5_0_ring_emit_ib()
1144 ib.length_dw = 8; in sdma_v5_0_ring_test_ib()
1196 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v5_0_vm_copy_pte()
1198 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v5_0_vm_copy_pte()
1199 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v5_0_vm_copy_pte()
1200 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v5_0_vm_copy_pte()
1201 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v5_0_vm_copy_pte()
1202 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v5_0_vm_copy_pte()
1203 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v5_0_vm_copy_pte()
1224 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v5_0_vm_write_pte()
[all …]
uvd_v7_0.c
235 ib->length_dw = 0; in uvd_v7_0_enc_get_create_msg()
236 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v7_0_enc_get_create_msg()
237 ib->ptr[ib->length_dw++] = 0x00000001; /* session info */ in uvd_v7_0_enc_get_create_msg()
238 ib->ptr[ib->length_dw++] = handle; in uvd_v7_0_enc_get_create_msg()
239 ib->ptr[ib->length_dw++] = 0x00000000; in uvd_v7_0_enc_get_create_msg()
240 ib->ptr[ib->length_dw++] = upper_32_bits(addr); in uvd_v7_0_enc_get_create_msg()
241 ib->ptr[ib->length_dw++] = addr; in uvd_v7_0_enc_get_create_msg()
243 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v7_0_enc_get_create_msg()
244 ib->ptr[ib->length_dw++] = 0x00000002; /* task info */ in uvd_v7_0_enc_get_create_msg()
245 ib->ptr[ib->length_dw++] = 0x0000001c; in uvd_v7_0_enc_get_create_msg()
[all …]
sdma_v4_0.c
816 amdgpu_ring_write(ring, ib->length_dw); in sdma_v4_0_ring_emit_ib()
1555 ib.length_dw = 8; in sdma_v4_0_ring_test_ib()
1599 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v4_0_vm_copy_pte()
1601 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v4_0_vm_copy_pte()
1602 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v4_0_vm_copy_pte()
1603 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v4_0_vm_copy_pte()
1604 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v4_0_vm_copy_pte()
1605 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v4_0_vm_copy_pte()
1606 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v4_0_vm_copy_pte()
1627 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v4_0_vm_write_pte()
[all …]
amdgpu_vm_sdma.c
102 WARN_ON(ib->length_dw == 0); in amdgpu_vm_sdma_commit()
104 WARN_ON(ib->length_dw > p->num_dw_left); in amdgpu_vm_sdma_commit()
234 ndw -= p->job->ibs->length_dw; in amdgpu_vm_sdma_update()
gfx_v9_4_2.c
377 ib->length_dw = 0; in gfx_v9_4_2_run_shader()
381 ib->ptr[ib->length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1); in gfx_v9_4_2_run_shader()
382 ib->ptr[ib->length_dw++] = SOC15_REG_ENTRY_OFFSET(init_regs[i]) in gfx_v9_4_2_run_shader()
384 ib->ptr[ib->length_dw++] = init_regs[i].reg_value; in gfx_v9_4_2_run_shader()
389 ib->ptr[ib->length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2); in gfx_v9_4_2_run_shader()
390 ib->ptr[ib->length_dw++] = SOC15_REG_OFFSET(GC, 0, regCOMPUTE_PGM_LO) in gfx_v9_4_2_run_shader()
392 ib->ptr[ib->length_dw++] = lower_32_bits(gpu_addr); in gfx_v9_4_2_run_shader()
393 ib->ptr[ib->length_dw++] = upper_32_bits(gpu_addr); in gfx_v9_4_2_run_shader()
396 ib->ptr[ib->length_dw++] = PACKET3(PACKET3_SET_SH_REG, 3); in gfx_v9_4_2_run_shader()
397 ib->ptr[ib->length_dw++] = SOC15_REG_OFFSET(GC, 0, regCOMPUTE_USER_DATA_0) in gfx_v9_4_2_run_shader()
[all …]
gfx_v9_0.c
1047 ib.length_dw = 5; in gfx_v9_0_ring_test_ib()
4431 ib.length_dw = 0; in gfx_v9_0_do_edc_gpr_workarounds()
4436 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1); in gfx_v9_0_do_edc_gpr_workarounds()
4437 ib.ptr[ib.length_dw++] = SOC15_REG_ENTRY_OFFSET(vgpr_init_regs_ptr[i]) in gfx_v9_0_do_edc_gpr_workarounds()
4439 ib.ptr[ib.length_dw++] = vgpr_init_regs_ptr[i].reg_value; in gfx_v9_0_do_edc_gpr_workarounds()
4443 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2); in gfx_v9_0_do_edc_gpr_workarounds()
4444 ib.ptr[ib.length_dw++] = SOC15_REG_OFFSET(GC, 0, mmCOMPUTE_PGM_LO) in gfx_v9_0_do_edc_gpr_workarounds()
4446 ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr); in gfx_v9_0_do_edc_gpr_workarounds()
4447 ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr); in gfx_v9_0_do_edc_gpr_workarounds()
4450 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3); in gfx_v9_0_do_edc_gpr_workarounds()
[all …]
gfx_v8_0.c
896 ib.length_dw = 5; in gfx_v8_0_ring_test_ib()
1565 ib.length_dw = 0; in gfx_v8_0_do_edc_gpr_workarounds()
1570 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1); in gfx_v8_0_do_edc_gpr_workarounds()
1571 ib.ptr[ib.length_dw++] = vgpr_init_regs[i] - PACKET3_SET_SH_REG_START; in gfx_v8_0_do_edc_gpr_workarounds()
1572 ib.ptr[ib.length_dw++] = vgpr_init_regs[i + 1]; in gfx_v8_0_do_edc_gpr_workarounds()
1576 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2); in gfx_v8_0_do_edc_gpr_workarounds()
1577 ib.ptr[ib.length_dw++] = mmCOMPUTE_PGM_LO - PACKET3_SET_SH_REG_START; in gfx_v8_0_do_edc_gpr_workarounds()
1578 ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr); in gfx_v8_0_do_edc_gpr_workarounds()
1579 ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr); in gfx_v8_0_do_edc_gpr_workarounds()
1582 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3); in gfx_v8_0_do_edc_gpr_workarounds()
[all …]
amdgpu_cs.c
229 p->chunks[i].length_dw = user_chunk.length_dw; in amdgpu_cs_pass1()
231 size = p->chunks[i].length_dw; in amdgpu_cs_pass1()
386 ib->length_dw = chunk_ib->ib_bytes / 4; in amdgpu_cs_p2_ib()
399 num_deps = chunk->length_dw * 4 / in amdgpu_cs_p2_dependencies()
471 num_deps = chunk->length_dw * 4 / in amdgpu_cs_p2_syncobj_in()
489 num_deps = chunk->length_dw * 4 / in amdgpu_cs_p2_syncobj_timeline_wait()
509 num_deps = chunk->length_dw * 4 / in amdgpu_cs_p2_syncobj_out()
543 num_deps = chunk->length_dw * 4 / in amdgpu_cs_p2_syncobj_timeline_signal()
1043 if ((va_start + ib->length_dw * 4) > in amdgpu_cs_patch_ibs()
1058 memcpy(ib->ptr, kptr, ib->length_dw * 4); in amdgpu_cs_patch_ibs()
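
The amdgpu_cs.c hits treat length_dw strictly as a count of 32-bit words: ib_bytes / 4 converts a chunk size in bytes to dwords, and length_dw * 4 converts back to bytes for the bounds check and memcpy(), or to a record count when a chunk carries fixed-size entries. A hedged sketch of those conversions (fake_dep and the helper names are illustrative, not the kernel's):

#include <stdint.h>

/* Hypothetical 8-byte record standing in for a dependency/syncobj entry. */
struct fake_dep {
	uint32_t handle;
	uint32_t flags;
};

/* Bytes -> dwords, as in ib->length_dw = chunk_ib->ib_bytes / 4. */
static inline uint32_t ib_bytes_to_dw(uint32_t ib_bytes)
{
	return ib_bytes / sizeof(uint32_t);
}

/* Dwords -> record count, as in num_deps = chunk->length_dw * 4 / sizeof(...). */
static inline uint32_t chunk_num_deps(uint32_t length_dw)
{
	return length_dw * sizeof(uint32_t) / sizeof(struct fake_dep);
}
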
/Linux-v6.1/drivers/net/ethernet/qlogic/qed/
qed_hw.c
462 le16_to_cpu(p_command->length_dw), in qed_dmae_post_command()
477 le16_to_cpu(p_command->length_dw), in qed_dmae_post_command()
608 u32 length_dw) in qed_dmae_execute_sub_operation() argument
626 length_dw * sizeof(u32)); in qed_dmae_execute_sub_operation()
647 cmd->length_dw = cpu_to_le16((u16)length_dw); in qed_dmae_execute_sub_operation()
656 src_addr, dst_addr, length_dw); in qed_dmae_execute_sub_operation()
663 length_dw * sizeof(u32)); in qed_dmae_execute_sub_operation()
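
The qed hits use length_dw differently: it is the size of a DMAE sub-operation in dwords, stored as a little-endian u16 inside the hardware command (cpu_to_le16) and multiplied by sizeof(u32) whenever a byte count is needed. A minimal sketch of that encoding, with a hypothetical command struct and userspace stand-ins for the kernel's endianness helpers:

#include <stdint.h>

/* Hypothetical stand-in for the DMAE command; the real layout lives in qed. */
struct fake_dmae_cmd {
	uint16_t length_dw;  /* dword count, little-endian on the wire */
};

/* Stand-ins for cpu_to_le16()/le16_to_cpu(); no-ops on little-endian hosts. */
static inline uint16_t to_le16(uint16_t v)   { return v; }
static inline uint16_t from_le16(uint16_t v) { return v; }

static inline void dmae_set_length(struct fake_dmae_cmd *cmd, uint32_t length_dw)
{
	/* As in qed_hw.c: cmd->length_dw = cpu_to_le16((u16)length_dw); */
	cmd->length_dw = to_le16((uint16_t)length_dw);
}

static inline uint32_t dmae_length_bytes(const struct fake_dmae_cmd *cmd)
{
	/* length_dw * sizeof(u32) gives the transfer size in bytes. */
	return from_le16(cmd->length_dw) * sizeof(uint32_t);
}
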
