
Searched refs:length_dw (Results 1 – 25 of 56) sorted by relevance

/Linux-v4.19/drivers/gpu/drm/radeon/
si_dma.c
79 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
84 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pages()
120 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
121 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
122 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
133 ib->ptr[ib->length_dw++] = value; in si_dma_vm_write_pages()
134 ib->ptr[ib->length_dw++] = upper_32_bits(value); in si_dma_vm_write_pages()
[all …]
radeon_vce.c
363 ib.length_dw = 0; in radeon_vce_get_create_msg()
364 ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */ in radeon_vce_get_create_msg()
365 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */ in radeon_vce_get_create_msg()
366 ib.ptr[ib.length_dw++] = cpu_to_le32(handle); in radeon_vce_get_create_msg()
368 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000030); /* len */ in radeon_vce_get_create_msg()
369 ib.ptr[ib.length_dw++] = cpu_to_le32(0x01000001); /* create cmd */ in radeon_vce_get_create_msg()
370 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000); in radeon_vce_get_create_msg()
371 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000042); in radeon_vce_get_create_msg()
372 ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000a); in radeon_vce_get_create_msg()
373 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); in radeon_vce_get_create_msg()
[all …]
ni_dma.c
146 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
327 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
330 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
331 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
332 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in cayman_dma_vm_copy_pages()
368 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, in cayman_dma_vm_write_pages()
370 ib->ptr[ib->length_dw++] = pe; in cayman_dma_vm_write_pages()
371 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_write_pages()
382 ib->ptr[ib->length_dw++] = value; in cayman_dma_vm_write_pages()
[all …]
radeon_cs.c
89 p->nrelocs = chunk->length_dw / 4; in radeon_cs_parser_relocs()
311 p->chunks[i].length_dw = user_chunk.length_dw; in radeon_cs_parser_init()
318 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
324 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
330 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
334 size = p->chunks[i].length_dw; in radeon_cs_parser_init()
355 if (p->chunks[i].length_dw > 1) in radeon_cs_parser_init()
357 if (p->chunks[i].length_dw > 2) in radeon_cs_parser_init()
548 if (parser->const_ib.length_dw) { in radeon_cs_ib_vm_chunk()
614 if (ib_chunk->length_dw > RADEON_IB_VM_MAX_SIZE) { in radeon_cs_ib_fill()
[all …]
cik_sdma.c
157 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
732 ib.length_dw = 5; in cik_sdma_ib_test()
813 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY, in cik_sdma_vm_copy_pages()
815 ib->ptr[ib->length_dw++] = bytes; in cik_sdma_vm_copy_pages()
816 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in cik_sdma_vm_copy_pages()
817 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cik_sdma_vm_copy_pages()
818 ib->ptr[ib->length_dw++] = upper_32_bits(src); in cik_sdma_vm_copy_pages()
819 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pages()
820 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_copy_pages()
856 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE, in cik_sdma_vm_write_pages()
[all …]
radeon_vm.c
411 ib.length_dw = 0; in radeon_vm_clear_bo()
415 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
665 ib.length_dw = 0; in radeon_vm_update_page_directory()
702 if (ib.length_dw != 0) { in radeon_vm_update_page_directory()
706 WARN_ON(ib.length_dw > ndw); in radeon_vm_update_page_directory()
1003 ib.length_dw = 0; in radeon_vm_bo_update()
1021 WARN_ON(ib.length_dw > ndw); in radeon_vm_bo_update()
radeon_uvd.c
585 if (idx >= relocs_chunk->length_dw) { in radeon_uvd_cs_reloc()
587 idx, relocs_chunk->length_dw); in radeon_uvd_cs_reloc()
700 if (p->chunk_ib->length_dw % 16) { in radeon_uvd_cs_parse()
702 p->chunk_ib->length_dw); in radeon_uvd_cs_parse()
730 } while (p->idx < p->chunk_ib->length_dw); in radeon_uvd_cs_parse()
761 ib.length_dw = 16; in radeon_uvd_send_msg()
r600_dma.c
363 ib.length_dw = 4; in r600_dma_ib_test()
427 radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in r600_dma_ring_ib_execute()
evergreen_dma.c
90 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
radeon_ib.c
128 if (!ib->length_dw || !ring->ready) { in radeon_ib_schedule()
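
Taken together, the radeon hits show one idiom: an indirect buffer (IB) is filled one dword at a time, with ib->length_dw serving both as the write cursor (ib->ptr[ib->length_dw++] = ...) and as the final size that the ring-emit and schedule paths consume, e.g. ib->length_dw << 12 in cayman_dma_ring_ib_execute() or the !ib->length_dw check in radeon_ib_schedule(). Below is a minimal, self-contained sketch of that idiom; struct fake_ib and build_write_packet() are made-up stand-ins, not code from the files above.

    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-in for the radeon/amdgpu IB structure; only the fields the idiom needs. */
    struct fake_ib {
            uint32_t *ptr;       /* CPU-visible IB contents */
            uint32_t length_dw;  /* dwords written so far; doubles as the final packet size */
    };

    /* Hypothetical helper: append one "write"-style packet, bumping length_dw per dword. */
    static void build_write_packet(struct fake_ib *ib, uint64_t pe, uint32_t value)
    {
            ib->ptr[ib->length_dw++] = 0x20;                  /* stand-in packet header */
            ib->ptr[ib->length_dw++] = (uint32_t)pe;          /* lower_32_bits(pe) */
            ib->ptr[ib->length_dw++] = (uint32_t)(pe >> 32);  /* upper_32_bits(pe) */
            ib->ptr[ib->length_dw++] = value;
    }

    int main(void)
    {
            uint32_t buf[16] = { 0 };
            struct fake_ib ib = { .ptr = buf, .length_dw = 0 };

            build_write_packet(&ib, 0x100000000ULL, 0xdeadbeef);
            printf("IB holds %u dwords\n", ib.length_dw);  /* the count a real driver would emit */
            return 0;
    }
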
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/
amdgpu_vce.c
450 ib->length_dw = 0; in amdgpu_vce_get_create_msg()
451 ib->ptr[ib->length_dw++] = 0x0000000c; /* len */ in amdgpu_vce_get_create_msg()
452 ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */ in amdgpu_vce_get_create_msg()
453 ib->ptr[ib->length_dw++] = handle; in amdgpu_vce_get_create_msg()
456 ib->ptr[ib->length_dw++] = 0x00000040; /* len */ in amdgpu_vce_get_create_msg()
458 ib->ptr[ib->length_dw++] = 0x00000030; /* len */ in amdgpu_vce_get_create_msg()
459 ib->ptr[ib->length_dw++] = 0x01000001; /* create cmd */ in amdgpu_vce_get_create_msg()
460 ib->ptr[ib->length_dw++] = 0x00000000; in amdgpu_vce_get_create_msg()
461 ib->ptr[ib->length_dw++] = 0x00000042; in amdgpu_vce_get_create_msg()
462 ib->ptr[ib->length_dw++] = 0x0000000a; in amdgpu_vce_get_create_msg()
[all …]
amdgpu_vcn.c
312 ib->length_dw = 16; in amdgpu_vcn_dec_send_msg()
482 ib->length_dw = 0; in amdgpu_vcn_enc_get_create_msg()
483 ib->ptr[ib->length_dw++] = 0x00000018; in amdgpu_vcn_enc_get_create_msg()
484 ib->ptr[ib->length_dw++] = 0x00000001; /* session info */ in amdgpu_vcn_enc_get_create_msg()
485 ib->ptr[ib->length_dw++] = handle; in amdgpu_vcn_enc_get_create_msg()
486 ib->ptr[ib->length_dw++] = upper_32_bits(dummy); in amdgpu_vcn_enc_get_create_msg()
487 ib->ptr[ib->length_dw++] = dummy; in amdgpu_vcn_enc_get_create_msg()
488 ib->ptr[ib->length_dw++] = 0x0000000b; in amdgpu_vcn_enc_get_create_msg()
490 ib->ptr[ib->length_dw++] = 0x00000014; in amdgpu_vcn_enc_get_create_msg()
491 ib->ptr[ib->length_dw++] = 0x00000002; /* task info */ in amdgpu_vcn_enc_get_create_msg()
[all …]
si_dma.c
74 amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in si_dma_ring_emit_ib()
291 ib.length_dw = 4; in si_dma_ring_test_ib()
338 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pte()
340 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pte()
341 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pte()
342 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pte()
343 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pte()
363 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pte()
364 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_write_pte()
365 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in si_dma_vm_write_pte()
[all …]
cik_sdma.c
232 amdgpu_ring_write(ring, ib->length_dw); in cik_sdma_ring_emit_ib()
702 ib.length_dw = 5; in cik_sdma_ring_test_ib()
749 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY, in cik_sdma_vm_copy_pte()
751 ib->ptr[ib->length_dw++] = bytes; in cik_sdma_vm_copy_pte()
752 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in cik_sdma_vm_copy_pte()
753 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cik_sdma_vm_copy_pte()
754 ib->ptr[ib->length_dw++] = upper_32_bits(src); in cik_sdma_vm_copy_pte()
755 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pte()
756 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_copy_pte()
776 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE, in cik_sdma_vm_write_pte()
[all …]
sdma_v2_4.c
259 amdgpu_ring_write(ring, ib->length_dw); in sdma_v2_4_ring_emit_ib()
681 ib.length_dw = 8; in sdma_v2_4_ring_test_ib()
729 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v2_4_vm_copy_pte()
731 ib->ptr[ib->length_dw++] = bytes; in sdma_v2_4_vm_copy_pte()
732 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v2_4_vm_copy_pte()
733 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v2_4_vm_copy_pte()
734 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v2_4_vm_copy_pte()
735 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v2_4_vm_copy_pte()
736 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v2_4_vm_copy_pte()
756 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v2_4_vm_write_pte()
[all …]
sdma_v3_0.c
434 amdgpu_ring_write(ring, ib->length_dw); in sdma_v3_0_ring_emit_ib()
954 ib.length_dw = 8; in sdma_v3_0_ring_test_ib()
1001 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v3_0_vm_copy_pte()
1003 ib->ptr[ib->length_dw++] = bytes; in sdma_v3_0_vm_copy_pte()
1004 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_vm_copy_pte()
1005 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v3_0_vm_copy_pte()
1006 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v3_0_vm_copy_pte()
1007 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v3_0_vm_copy_pte()
1008 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v3_0_vm_copy_pte()
1028 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v3_0_vm_write_pte()
[all …]
sdma_v4_0.c
392 amdgpu_ring_write(ring, ib->length_dw); in sdma_v4_0_ring_emit_ib()
1016 ib.length_dw = 8; in sdma_v4_0_ring_test_ib()
1064 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v4_0_vm_copy_pte()
1066 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v4_0_vm_copy_pte()
1067 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v4_0_vm_copy_pte()
1068 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v4_0_vm_copy_pte()
1069 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v4_0_vm_copy_pte()
1070 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v4_0_vm_copy_pte()
1071 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v4_0_vm_copy_pte()
1093 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v4_0_vm_write_pte()
[all …]
uvd_v6_0.c
231 ib->length_dw = 0; in uvd_v6_0_enc_get_create_msg()
232 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v6_0_enc_get_create_msg()
233 ib->ptr[ib->length_dw++] = 0x00000001; /* session info */ in uvd_v6_0_enc_get_create_msg()
234 ib->ptr[ib->length_dw++] = handle; in uvd_v6_0_enc_get_create_msg()
235 ib->ptr[ib->length_dw++] = 0x00010000; in uvd_v6_0_enc_get_create_msg()
236 ib->ptr[ib->length_dw++] = upper_32_bits(dummy); in uvd_v6_0_enc_get_create_msg()
237 ib->ptr[ib->length_dw++] = dummy; in uvd_v6_0_enc_get_create_msg()
239 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v6_0_enc_get_create_msg()
240 ib->ptr[ib->length_dw++] = 0x00000002; /* task info */ in uvd_v6_0_enc_get_create_msg()
241 ib->ptr[ib->length_dw++] = 0x0000001c; in uvd_v6_0_enc_get_create_msg()
[all …]
uvd_v7_0.c
239 ib->length_dw = 0; in uvd_v7_0_enc_get_create_msg()
240 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v7_0_enc_get_create_msg()
241 ib->ptr[ib->length_dw++] = 0x00000001; /* session info */ in uvd_v7_0_enc_get_create_msg()
242 ib->ptr[ib->length_dw++] = handle; in uvd_v7_0_enc_get_create_msg()
243 ib->ptr[ib->length_dw++] = 0x00000000; in uvd_v7_0_enc_get_create_msg()
244 ib->ptr[ib->length_dw++] = upper_32_bits(dummy); in uvd_v7_0_enc_get_create_msg()
245 ib->ptr[ib->length_dw++] = dummy; in uvd_v7_0_enc_get_create_msg()
247 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v7_0_enc_get_create_msg()
248 ib->ptr[ib->length_dw++] = 0x00000002; /* task info */ in uvd_v7_0_enc_get_create_msg()
249 ib->ptr[ib->length_dw++] = 0x0000001c; in uvd_v7_0_enc_get_create_msg()
[all …]
gfx_v8_0.c
907 ib.length_dw = 5; in gfx_v8_0_ring_test_ib()
1669 ib.length_dw = 0; in gfx_v8_0_do_edc_gpr_workarounds()
1674 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1); in gfx_v8_0_do_edc_gpr_workarounds()
1675 ib.ptr[ib.length_dw++] = vgpr_init_regs[i] - PACKET3_SET_SH_REG_START; in gfx_v8_0_do_edc_gpr_workarounds()
1676 ib.ptr[ib.length_dw++] = vgpr_init_regs[i + 1]; in gfx_v8_0_do_edc_gpr_workarounds()
1680 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2); in gfx_v8_0_do_edc_gpr_workarounds()
1681 ib.ptr[ib.length_dw++] = mmCOMPUTE_PGM_LO - PACKET3_SET_SH_REG_START; in gfx_v8_0_do_edc_gpr_workarounds()
1682 ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr); in gfx_v8_0_do_edc_gpr_workarounds()
1683 ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr); in gfx_v8_0_do_edc_gpr_workarounds()
1686 ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3); in gfx_v8_0_do_edc_gpr_workarounds()
[all …]
amdgpu_cs.c
162 p->chunks[i].length_dw = user_chunk.length_dw; in amdgpu_cs_parser_init()
164 size = p->chunks[i].length_dw; in amdgpu_cs_parser_init()
186 if (p->chunks[i].length_dw * sizeof(uint32_t) < size) { in amdgpu_cs_parser_init()
200 if (p->chunks[i].length_dw * sizeof(uint32_t) < size) { in amdgpu_cs_parser_init()
1040 ib->length_dw = chunk_ib->ib_bytes / 4; in amdgpu_cs_ib_fill()
1064 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_fence_dep()
1126 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_syncobj_in_dep()
1144 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_syncobj_out_dep()
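
In amdgpu_cs.c the count runs the other way: userspace chunks arrive with length_dw, and the parser turns it back into bytes (length_dw * sizeof(uint32_t)) or into an entry count by dividing the byte size by the per-entry struct size. A small sketch of that arithmetic follows; struct fake_dep is a made-up 8-byte entry, not the real drm_amdgpu_cs_chunk_dep layout.

    #include <stdint.h>
    #include <stdio.h>

    /* Made-up 8-byte dependency entry; the real UAPI structs differ. */
    struct fake_dep {
            uint32_t handle;
            uint32_t flags;
    };

    int main(void)
    {
            uint32_t length_dw = 6;  /* illustrative chunk size in dwords (24 bytes) */

            /* Same shape as the amdgpu_cs.c hits: dwords -> bytes -> number of entries. */
            size_t num_deps = (size_t)length_dw * 4 / sizeof(struct fake_dep);

            printf("%u dwords -> %zu dependency entries\n", length_dw, num_deps);
            return 0;
    }
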
amdgpu_uvd.c
898 if (ctx->idx >= ib->length_dw) { in amdgpu_uvd_cs_reg()
941 for (ctx->idx = 0 ; ctx->idx < ib->length_dw; ) { in amdgpu_uvd_cs_packets()
986 if (ib->length_dw % 16) { in amdgpu_uvd_ring_parse_cs()
988 ib->length_dw); in amdgpu_uvd_ring_parse_cs()
1071 ib->length_dw = 16; in amdgpu_uvd_send_msg()
amdgpu_ring.c
109 while (ib->length_dw & ring->funcs->align_mask) in amdgpu_ring_generic_pad_ib()
110 ib->ptr[ib->length_dw++] = ring->funcs->nop; in amdgpu_ring_generic_pad_ib()
soc15.c
239 u32 i, length_dw; in soc15_read_bios_from_rom() local
250 length_dw = ALIGN(length_bytes, 4) / 4; in soc15_read_bios_from_rom()
255 for (i = 0; i < length_dw; i++) in soc15_read_bios_from_rom()
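
soc15.c is the one hit on this page where length_dw is derived rather than accumulated: a BIOS image length in bytes is rounded up to whole dwords before the ROM is read a dword at a time. A standalone sketch of that conversion follows; the ALIGN macro is a local stand-in for the kernel's, and the byte count is only an example.

    #include <stdint.h>
    #include <stdio.h>

    /* Local stand-in for the kernel's ALIGN(): round x up to a multiple of a (a is a power of two). */
    #define ALIGN(x, a) (((x) + (a) - 1) & ~((uint32_t)(a) - 1))

    int main(void)
    {
            uint32_t length_bytes = 13;                       /* illustrative ROM image size */
            uint32_t length_dw = ALIGN(length_bytes, 4) / 4;  /* 13 bytes -> 4 dwords */
            uint32_t i;

            for (i = 0; i < length_dw; i++)
                    printf("reading dword %u of %u\n", i + 1, length_dw);

            return 0;
    }
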
/Linux-v4.19/drivers/net/ethernet/qlogic/qed/
qed_hw.c
485 le16_to_cpu(p_command->length_dw), in qed_dmae_post_command()
500 le16_to_cpu(p_command->length_dw), in qed_dmae_post_command()
631 u32 length_dw) in qed_dmae_execute_sub_operation() argument
649 length_dw * sizeof(u32)); in qed_dmae_execute_sub_operation()
670 cmd->length_dw = cpu_to_le16((u16)length_dw); in qed_dmae_execute_sub_operation()
679 src_addr, dst_addr, length_dw); in qed_dmae_execute_sub_operation()
686 length_dw * sizeof(u32)); in qed_dmae_execute_sub_operation()
