Lines matching refs:sdma in sdma_v3_0.c
250 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_free_microcode()
251 release_firmware(adev->sdma.instance[i].fw); in sdma_v3_0_free_microcode()
252 adev->sdma.instance[i].fw = NULL; in sdma_v3_0_free_microcode()
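Lines 250-252 are the per-instance firmware teardown in sdma_v3_0_free_microcode(). A minimal sketch of the same iterate, release, and NULL-out pattern, using stand-in types rather than the real amdgpu structures, might look like this:

    #include <stdlib.h>

    /* Stand-in types; not the real amdgpu definitions. */
    struct fw_blob {
        unsigned char *data;
    };

    struct sdma_instance {
        struct fw_blob *fw;
    };

    struct sdma_block {
        int num_instances;
        struct sdma_instance instance[2];
    };

    /* Stand-in for release_firmware(): safe to call with NULL. */
    static void put_fw(struct fw_blob *fw)
    {
        if (!fw)
            return;
        free(fw->data);
        free(fw);
    }

    static void free_microcode(struct sdma_block *sdma)
    {
        int i;

        for (i = 0; i < sdma->num_instances; i++) {
            put_fw(sdma->instance[i].fw);
            sdma->instance[i].fw = NULL;    /* no dangling pointer if called again */
        }
    }

Clearing the pointer keeps a later release, for example from the init error path below, harmless.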
304 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
309 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v3_0_init_microcode()
312 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in sdma_v3_0_init_microcode()
315 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_init_microcode()
316 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v3_0_init_microcode()
317 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v3_0_init_microcode()
318 if (adev->sdma.instance[i].feature_version >= 20) in sdma_v3_0_init_microcode()
319 adev->sdma.instance[i].burst_nop = true; in sdma_v3_0_init_microcode()
324 info->fw = adev->sdma.instance[i].fw; in sdma_v3_0_init_microcode()
333 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
334 release_firmware(adev->sdma.instance[i].fw); in sdma_v3_0_init_microcode()
335 adev->sdma.instance[i].fw = NULL; in sdma_v3_0_init_microcode()
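Lines 304-335 are sdma_v3_0_init_microcode(): firmware is requested and validated per instance, the sdma_firmware_header_v1_0 header supplies fw_version and feature_version (burst_nop is enabled from feature_version 20 on), and the loop at 333-335 is the error path that releases whatever was already requested. Reusing the stand-in types and put_fw() helper from the sketch above, and with hypothetical fetch_fw()/validate_fw() helpers standing in for request_firmware()/amdgpu_ucode_validate(), the request-validate-unwind shape looks roughly like:

    #include <errno.h>

    /* Hypothetical stand-ins for request_firmware()/amdgpu_ucode_validate(). */
    static int fetch_fw(struct fw_blob **fw, int idx)
    {
        (void)idx;
        *fw = calloc(1, sizeof(**fw));
        return *fw ? 0 : -ENOMEM;
    }

    static int validate_fw(const struct fw_blob *fw)
    {
        return fw ? 0 : -EINVAL;
    }

    static int init_microcode(struct sdma_block *sdma)
    {
        int i, err;

        for (i = 0; i < sdma->num_instances; i++) {
            err = fetch_fw(&sdma->instance[i].fw, i);
            if (err)
                goto out;
            err = validate_fw(sdma->instance[i].fw);
            if (err)
                goto out;
            /* the real code also parses the firmware header here and flips
             * burst_nop on when feature_version >= 20 (lines 315-319) */
        }
        return 0;

    out:
        /* unwind mirrors lines 333-335: every slot is released; untouched
         * slots are still NULL, which put_fw() tolerates */
        for (i = 0; i < sdma->num_instances; i++) {
            put_fw(sdma->instance[i].fw);
            sdma->instance[i].fw = NULL;
        }
        return err;
    }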
403 struct amdgpu_sdma_instance *sdma = amdgpu_get_sdma_instance(ring); in sdma_v3_0_ring_insert_nop() local
407 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_insert_nop()
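Lines 403-407 are sdma_v3_0_ring_insert_nop(): the owning SDMA instance is looked up, and when its firmware supports burst NOPs the first NOP dword carries a count for the padding that follows, rather than every dword being treated as an individual NOP. A sketch of that branch, with a hypothetical ring and packet encoding (the real driver uses its own SDMA NOP header macros):

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative packet encoding; not the real SDMA NOP header layout. */
    #define NOP_PKT             0x00000000u
    #define NOP_PKT_COUNT(n)    ((uint32_t)(n) << 16)

    struct pad_ring {
        uint32_t buf[64];
        unsigned int wptr;
    };

    static void ring_write(struct pad_ring *ring, uint32_t v)
    {
        ring->buf[ring->wptr++ & 63] = v;
    }

    /* Mirrors the shape of the branch at line 407: with burst NOPs the first
     * dword encodes how many padding dwords follow; otherwise each dword is
     * an individual NOP packet. */
    static void insert_nop(struct pad_ring *ring, unsigned int count, bool burst_nop)
    {
        unsigned int i;

        for (i = 0; i < count; i++) {
            if (burst_nop && i == 0)
                ring_write(ring, NOP_PKT | NOP_PKT_COUNT(count - 1));
            else
                ring_write(ring, NOP_PKT);
        }
    }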
510 struct amdgpu_ring *sdma0 = &adev->sdma.instance[0].ring; in sdma_v3_0_gfx_stop()
511 struct amdgpu_ring *sdma1 = &adev->sdma.instance[1].ring; in sdma_v3_0_gfx_stop()
519 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_stop()
580 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_ctx_switch_enable()
622 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_enable()
650 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
651 ring = &adev->sdma.instance[i].ring; in sdma_v3_0_gfx_resume()
751 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
752 ring = &adev->sdma.instance[i].ring; in sdma_v3_0_gfx_resume()
798 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_load_microcode()
799 if (!adev->sdma.instance[i].fw) in sdma_v3_0_load_microcode()
801 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_load_microcode()
805 (adev->sdma.instance[i].fw->data + in sdma_v3_0_load_microcode()
810 WREG32(mmSDMA0_UCODE_ADDR + sdma_offsets[i], adev->sdma.instance[i].fw_version); in sdma_v3_0_load_microcode()
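Lines 798-810 are the direct microcode upload in sdma_v3_0_load_microcode(): any instance without firmware aborts the load; otherwise the header is skipped and the remaining dwords are streamed into the engine's UCODE data register, with the firmware version written last (line 810). A sketch of that header-skip-and-stream pattern, with a made-up register map and a write_reg() stand-in for WREG32():

    #include <stdint.h>

    /* Illustrative register offsets; the real ones come from the amdgpu
     * register headers plus sdma_offsets[i] for the per-engine base. */
    #define REG_UCODE_ADDR  0x0u
    #define REG_UCODE_DATA  0x4u

    static void write_reg(uint32_t reg, uint32_t val)
    {
        (void)reg;
        (void)val;      /* a real driver would do an MMIO write here */
    }

    /* Simplified stand-in for struct sdma_firmware_header_v1_0. */
    struct fw_header {
        uint32_t ucode_size_bytes;
        uint32_t ucode_array_offset_bytes;
    };

    static void upload_microcode(const uint8_t *blob, uint32_t fw_version)
    {
        const struct fw_header *hdr = (const struct fw_header *)blob;
        const uint32_t *ucode =
            (const uint32_t *)(blob + hdr->ucode_array_offset_bytes);
        uint32_t i, dwords = hdr->ucode_size_bytes / 4;

        write_reg(REG_UCODE_ADDR, 0);               /* start at offset 0 */
        for (i = 0; i < dwords; i++)
            write_reg(REG_UCODE_DATA, ucode[i]);    /* real code converts via le32_to_cpu() */
        write_reg(REG_UCODE_ADDR, fw_version);      /* version last, as at line 810 */
    }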
1077 struct amdgpu_sdma_instance *sdma = amdgpu_get_sdma_instance(ring); in sdma_v3_0_ring_pad_ib() local
1083 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_pad_ib()
1158 adev->sdma.num_instances = 1; in sdma_v3_0_early_init()
1161 adev->sdma.num_instances = SDMA_MAX_INSTANCE; in sdma_v3_0_early_init()
1181 &adev->sdma.trap_irq); in sdma_v3_0_sw_init()
1187 &adev->sdma.illegal_inst_irq); in sdma_v3_0_sw_init()
1193 &adev->sdma.illegal_inst_irq); in sdma_v3_0_sw_init()
1203 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_sw_init()
1204 ring = &adev->sdma.instance[i].ring; in sdma_v3_0_sw_init()
1216 &adev->sdma.trap_irq, in sdma_v3_0_sw_init()
1232 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_sw_fini()
1233 amdgpu_ring_fini(&adev->sdma.instance[i].ring); in sdma_v3_0_sw_fini()
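Lines 1181-1233 cover sdma_v3_0_sw_init() and sdma_v3_0_sw_fini(): the trap and illegal-instruction interrupt sources are registered first, one ring is then created per instance, and sw_fini walks the same per-instance loop to tear the rings down. A sketch of that symmetric setup/teardown, with stub helpers in place of amdgpu_irq_add_id(), amdgpu_ring_init(), and amdgpu_ring_fini():

    struct sk_inst {
        int ring_ready;
    };

    struct sk_sdma {
        int num_instances;
        struct sk_inst instance[2];
        int trap_irq_registered;
    };

    static int irq_source_add(struct sk_sdma *s)    /* stub for amdgpu_irq_add_id() */
    {
        s->trap_irq_registered = 1;
        return 0;
    }

    static int ring_create(struct sk_inst *inst)    /* stub for amdgpu_ring_init() */
    {
        inst->ring_ready = 1;
        return 0;
    }

    static void ring_destroy(struct sk_inst *inst)  /* stub for amdgpu_ring_fini() */
    {
        inst->ring_ready = 0;
    }

    static int sw_init(struct sk_sdma *s)
    {
        int i, r;

        r = irq_source_add(s);                  /* IRQ sources first (lines 1181-1193) */
        if (r)
            return r;

        for (i = 0; i < s->num_instances; i++) {
            r = ring_create(&s->instance[i]);   /* one ring per instance (1203-1216) */
            if (r)
                return r;
        }
        return 0;
    }

    static void sw_fini(struct sk_sdma *s)
    {
        int i;

        for (i = 0; i < s->num_instances; i++)  /* mirror of the init loop (1232-1233) */
            ring_destroy(&s->instance[i]);
    }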
1319 adev->sdma.srbm_soft_reset = srbm_soft_reset; in sdma_v3_0_check_soft_reset()
1322 adev->sdma.srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1332 if (!adev->sdma.srbm_soft_reset) in sdma_v3_0_pre_soft_reset()
1335 srbm_soft_reset = adev->sdma.srbm_soft_reset; in sdma_v3_0_pre_soft_reset()
1351 if (!adev->sdma.srbm_soft_reset) in sdma_v3_0_post_soft_reset()
1354 srbm_soft_reset = adev->sdma.srbm_soft_reset; in sdma_v3_0_post_soft_reset()
1371 if (!adev->sdma.srbm_soft_reset) in sdma_v3_0_soft_reset()
1374 srbm_soft_reset = adev->sdma.srbm_soft_reset; in sdma_v3_0_soft_reset()
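Lines 1319-1374 are the soft-reset handshake: sdma_v3_0_check_soft_reset() computes a reset mask and caches it in adev->sdma.srbm_soft_reset, and the pre, soft, and post reset callbacks all return early when that cached mask is zero. A sketch of the check-then-consume split, using an illustrative bit layout rather than the real SRBM_SOFT_RESET fields:

    #include <stdbool.h>
    #include <stdint.h>

    struct reset_state {
        uint32_t srbm_soft_reset;   /* mask filled in by the check phase */
    };

    #define RESET_SDMA0  (1u << 0)  /* illustrative bits only */
    #define RESET_SDMA1  (1u << 1)

    static bool check_soft_reset(struct reset_state *s, bool sdma0_hung, bool sdma1_hung)
    {
        uint32_t mask = 0;

        if (sdma0_hung)
            mask |= RESET_SDMA0;
        if (sdma1_hung)
            mask |= RESET_SDMA1;

        s->srbm_soft_reset = mask;  /* cached for the later phases (lines 1319/1322) */
        return mask != 0;
    }

    static void pre_soft_reset(struct reset_state *s)
    {
        if (!s->srbm_soft_reset)
            return;                 /* nothing flagged by the check phase, skip */
        /* ...stop the engines named in s->srbm_soft_reset before the reset... */
    }

    static void post_soft_reset(struct reset_state *s)
    {
        if (!s->srbm_soft_reset)
            return;
        /* ...restart the engines named in s->srbm_soft_reset after the reset... */
    }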
1455 amdgpu_fence_process(&adev->sdma.instance[0].ring); in sdma_v3_0_process_trap_irq()
1468 amdgpu_fence_process(&adev->sdma.instance[1].ring); in sdma_v3_0_process_trap_irq()
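Lines 1455 and 1468 show sdma_v3_0_process_trap_irq() forwarding fence work to instance 0's or instance 1's ring depending on which engine raised the trap. A sketch of that dispatch, with a stand-in for amdgpu_fence_process() and a hypothetical instance id taken from the IRQ entry:

    struct irq_ring {
        int id;
    };

    struct irq_sdma {
        struct irq_ring ring[2];
    };

    /* Stand-in for amdgpu_fence_process(): signal completed fences on a ring. */
    static void fence_process(struct irq_ring *ring)
    {
        (void)ring;
    }

    /* Dispatch modeled on lines 1455/1468: the entry says which SDMA engine
     * raised the trap, and the matching ring's fences get processed. */
    static int process_trap_irq(struct irq_sdma *sdma, unsigned int instance_id)
    {
        switch (instance_id) {
        case 0:
            fence_process(&sdma->ring[0]);
            break;
        case 1:
            fence_process(&sdma->ring[1]);
            break;
        default:
            return -1;  /* unexpected source; ignore it */
        }
        return 0;
    }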
1499 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1513 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1538 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1546 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1656 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_ring_funcs()
1657 adev->sdma.instance[i].ring.funcs = &sdma_v3_0_ring_funcs; in sdma_v3_0_set_ring_funcs()
1658 adev->sdma.instance[i].ring.me = i; in sdma_v3_0_set_ring_funcs()
1673 adev->sdma.trap_irq.num_types = AMDGPU_SDMA_IRQ_LAST; in sdma_v3_0_set_irq_funcs()
1674 adev->sdma.trap_irq.funcs = &sdma_v3_0_trap_irq_funcs; in sdma_v3_0_set_irq_funcs()
1675 adev->sdma.illegal_inst_irq.funcs = &sdma_v3_0_illegal_inst_irq_funcs; in sdma_v3_0_set_irq_funcs()
1741 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v3_0_set_buffer_funcs()
1759 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_set_vm_pte_funcs()
1761 &adev->sdma.instance[i].ring; in sdma_v3_0_set_vm_pte_funcs()
1763 adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances; in sdma_v3_0_set_vm_pte_funcs()
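Lines 1656-1763 are the late wiring: every instance's ring gets the shared sdma_v3_0_ring_funcs table plus its own .me index, the trap IRQ source gets its type count and handler table, buffer moves are pointed at instance 0's ring, and the VM page-table path is handed every SDMA ring along with the ring count. A sketch of that fan-out over simplified fields:

    struct sk_ring_funcs {
        void (*emit_nop)(void);     /* placeholder op; the real table is much larger */
    };

    struct sk_ring {
        const struct sk_ring_funcs *funcs;
        unsigned int me;            /* which SDMA engine owns this ring */
    };

    struct sk_adev {
        int num_instances;
        struct sk_ring ring[2];
        struct sk_ring *buffer_funcs_ring;      /* ring used for buffer moves */
        struct sk_ring *vm_pte_rings[2];        /* rings offered for page-table updates */
        int vm_pte_num_rings;
    };

    static const struct sk_ring_funcs sdma_ring_funcs; /* one shared ops table */

    static void set_funcs(struct sk_adev *adev)
    {
        int i;

        for (i = 0; i < adev->num_instances; i++) {
            adev->ring[i].funcs = &sdma_ring_funcs; /* shared table (line 1657) */
            adev->ring[i].me = i;                   /* per-ring engine index (line 1658) */
        }

        adev->buffer_funcs_ring = &adev->ring[0];   /* buffer moves pinned to instance 0 (line 1741) */

        for (i = 0; i < adev->num_instances; i++)
            adev->vm_pte_rings[i] = &adev->ring[i]; /* any instance may do PTE updates (1759-1761) */
        adev->vm_pte_num_rings = adev->num_instances;   /* line 1763 */
    }

Pinning buffer moves to instance 0 while exposing every instance for PTE work matches how the listing splits line 1741 from lines 1759-1763.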