Lines Matching full:sdma
261 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v5_2_ring_insert_nop() local
265 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_2_ring_insert_nop()
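
The insert_nop hits above show the burst-NOP optimization: when the firmware advertises burst_nop, only the first padding slot carries a NOP header whose count field covers the rest of the run, and the remaining slots are plain NOP dwords. A minimal userspace sketch of that decision, using placeholder packet encodings and a hypothetical fill_nops() helper rather than the real amdgpu definitions (the driver writes each dword with amdgpu_ring_write()):

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for the per-instance state the driver looks at. */
struct fake_sdma_instance {
    int burst_nop;                      /* firmware supports burst NOP packets */
};

#define FAKE_NOP_OPCODE     0x00000000u
#define FAKE_NOP_COUNT(n)   (((uint32_t)(n) & 0x3fffu) << 16)  /* placeholder field */

/* Pad 'count' dwords with NOPs; with burst_nop, slot 0 encodes the whole run. */
static void fill_nops(const struct fake_sdma_instance *sdma,
                      uint32_t *ring, unsigned int count)
{
    unsigned int i;

    for (i = 0; i < count; i++) {
        if (sdma && sdma->burst_nop && i == 0)
            ring[i] = FAKE_NOP_OPCODE | FAKE_NOP_COUNT(count - 1);
        else
            ring[i] = FAKE_NOP_OPCODE;
    }
}

int main(void)
{
    struct fake_sdma_instance inst = { .burst_nop = 1 };
    uint32_t buf[8];

    fill_nops(&inst, buf, 8);
    printf("first pad dword: 0x%08x\n", buf[0]);
    return 0;
}
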
422 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_stop()
481 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_ctx_switch_enable()
520 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_enable()
548 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_resume()
549 ring = &adev->sdma.instance[i].ring; in sdma_v5_2_gfx_resume()
707 * sdma_v5_2_load_microcode - load the sDMA ME ucode
724 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_load_microcode()
725 if (!adev->sdma.instance[i].fw) in sdma_v5_2_load_microcode()
728 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v5_2_load_microcode()
733 (adev->sdma.instance[i].fw->data + in sdma_v5_2_load_microcode()
744 WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_UCODE_ADDR), adev->sdma.instance[i].fw_version); in sdma_v5_2_load_microcode()
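
The load_microcode hits walk every instance, skip the ones without a firmware blob, parse the sdma_firmware_header_v1_0 to locate the ucode payload, stream it into the engine one dword at a time, and finally program the firmware version into mmSDMA0_UCODE_ADDR. A hedged sketch of the header-offset arithmetic, using a simplified fake_fw_header stand-in instead of the real versioned structure and a plain array instead of the WREG32 stream:

#include <stdint.h>
#include <stddef.h>
#include <string.h>
#include <stdio.h>

/* Simplified stand-in; the real sdma_firmware_header_v1_0 has more fields. */
struct fake_fw_header {
    uint32_t ucode_size_bytes;          /* payload size in bytes */
    uint32_t ucode_array_offset_bytes;  /* payload offset from blob start */
    uint32_t fw_version;
};

/* Copy the ucode payload out of a firmware blob, dword by dword. */
static size_t load_ucode(const uint8_t *blob, uint32_t *dst, size_t dst_dwords)
{
    struct fake_fw_header hdr;
    size_t ndw, i;

    memcpy(&hdr, blob, sizeof(hdr));    /* read the versioned header */
    ndw = hdr.ucode_size_bytes / sizeof(uint32_t);
    if (ndw > dst_dwords)
        ndw = dst_dwords;
    for (i = 0; i < ndw; i++)           /* the driver does WREG32(...UCODE_DATA) here */
        memcpy(&dst[i],
               blob + hdr.ucode_array_offset_bytes + i * sizeof(uint32_t),
               sizeof(uint32_t));
    return ndw;
}

int main(void)
{
    struct fake_fw_header hdr = {
        .ucode_size_bytes = 16,
        .ucode_array_offset_bytes = sizeof(struct fake_fw_header),
        .fw_version = 70,
    };
    uint8_t blob[64] = { 0 };
    uint32_t sram[8];

    memcpy(blob, &hdr, sizeof(hdr));
    printf("copied %zu dwords (fw_version %u)\n",
           load_ucode(blob, sram, 8), (unsigned)hdr.fw_version);
    return 0;
}
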
757 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_soft_reset()
821 /* enable sdma ring preemption */ in sdma_v5_2_start()
1058 * Update PTEs by copying them from the GART using sDMA.
1086 * Update PTEs by writing them manually using sDMA.
1107 * sdma_v5_2_vm_set_pte_pde - update the page tables using sDMA
1116 * Update the page tables using sDMA.
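
These kernel-doc hits cover the GPUVM update paths: copying PTEs out of the GART, writing PTE values directly, and the set_pte_pde variant where a single packet carries a start address, per-entry increment, count, and flag bits that the engine expands on its own. The loop below only models the values such a packet produces; the flag layout and the fake_set_pte_pde() name are illustrative, not the real GPUVM PTE format:

#include <stdint.h>
#include <stdio.h>

/* Generate 'count' page-table entries the way a set_pte_pde packet is
 * parameterized: start address, per-entry increment, and flag bits.
 * In the driver the SDMA engine expands the packet itself. */
static void fake_set_pte_pde(uint64_t *ptes, unsigned int count,
                             uint64_t addr, uint64_t incr, uint64_t flags)
{
    unsigned int i;

    for (i = 0; i < count; i++) {
        ptes[i] = addr | flags;         /* address bits plus valid/permission flags */
        addr += incr;                   /* advance to the next page */
    }
}

int main(void)
{
    uint64_t ptes[4];

    fake_set_pte_pde(ptes, 4, 0x100000, 0x1000, 0x1 /* pretend "valid" bit */);
    printf("pte[3] = 0x%llx\n", (unsigned long long)ptes[3]);
    return 0;
}
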
1146 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v5_2_ring_pad_ib() local
1152 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_2_ring_pad_ib()
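
The pad_ib hits are the indirect-buffer counterpart of the ring padding above: the IB is rounded up to an 8-dword boundary with NOPs, reusing the same burst-NOP trick for the first pad slot. A small sketch of just the pad-count computation, assuming the 8-dword alignment the SDMA 5.x code uses:

#include <stdio.h>

/* NOP dwords needed to round length_dw up to a multiple of 8. */
static unsigned int ib_pad_count(unsigned int length_dw)
{
    return (8u - (length_dw & 7u)) & 7u;   /* same result as (-length_dw) & 7 */
}

int main(void)
{
    unsigned int len;

    for (len = 29; len <= 32; len++)
        printf("length_dw=%u -> pad %u dwords\n", len, ib_pad_count(len));
    return 0;
}
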
1189 * sdma_v5_2_ring_emit_vm_flush - vm flush using sDMA
1196 * using sDMA.
1290 /* SDMA trap event */ in sdma_v5_2_sw_init()
1291 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_sw_init()
1294 &adev->sdma.trap_irq); in sdma_v5_2_sw_init()
1301 DRM_ERROR("Failed to load sdma firmware!\n"); in sdma_v5_2_sw_init()
1305 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_sw_init()
1306 ring = &adev->sdma.instance[i].ring; in sdma_v5_2_sw_init()
1317 sprintf(ring->name, "sdma%d", i); in sdma_v5_2_sw_init()
1318 r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq, in sdma_v5_2_sw_init()
1333 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v5_2_sw_fini()
1334 amdgpu_ring_fini(&adev->sdma.instance[i].ring); in sdma_v5_2_sw_fini()
1353 /* disable the scheduler for SDMA */ in sdma_v5_2_hw_fini()
1383 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_is_idle()
1481 DRM_DEBUG("IH: SDMA trap\n"); in sdma_v5_2_process_trap_irq()
1502 amdgpu_fence_process(&adev->sdma.instance[0].ring); in sdma_v5_2_process_trap_irq()
1518 amdgpu_fence_process(&adev->sdma.instance[1].ring); in sdma_v5_2_process_trap_irq()
1534 amdgpu_fence_process(&adev->sdma.instance[2].ring); in sdma_v5_2_process_trap_irq()
1550 amdgpu_fence_process(&adev->sdma.instance[3].ring); in sdma_v5_2_process_trap_irq()
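
The trap-handler hits show each SDMA instance's interrupt being routed to amdgpu_fence_process() on that instance's ring only; in the driver the selection is made from the interrupt entry's client and ring IDs. A simplified dispatch model with stand-in types (fake_sdma, fake_ring) rather than the real IH entry decoding:

#include <stdio.h>

#define MAX_SDMA_INSTANCES 4

/* Simplified stand-in for a per-instance ring. */
struct fake_ring {
    unsigned int fences_processed;
};

struct fake_sdma {
    unsigned int num_instances;
    struct fake_ring ring[MAX_SDMA_INSTANCES];
};

/* Model of the trap path: an instance's interrupt only touches that
 * instance's ring, never a sibling's. */
static int handle_trap(struct fake_sdma *sdma, unsigned int instance_id)
{
    if (instance_id >= sdma->num_instances)
        return -1;                                  /* unknown source, ignore */
    sdma->ring[instance_id].fences_processed++;     /* amdgpu_fence_process() */
    return 0;
}

int main(void)
{
    struct fake_sdma sdma = { .num_instances = 4 };

    handle_trap(&sdma, 2);
    printf("instance 2 processed %u fence irqs\n",
           sdma.ring[2].fences_processed);
    return 0;
}
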
1580 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_update_medium_grain_clock_gating()
1582 …if (adev->sdma.instance[i].fw_version < 70 && adev->ip_versions[SDMA0_HWIP][0] == IP_VERSION(5, 2,… in sdma_v5_2_update_medium_grain_clock_gating()
1586 /* Enable sdma clock gating */ in sdma_v5_2_update_medium_grain_clock_gating()
1597 /* Disable sdma clock gating */ in sdma_v5_2_update_medium_grain_clock_gating()
1617 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_update_medium_grain_light_sleep()
1619 …if (adev->sdma.instance[i].fw_version < 70 && adev->ip_versions[SDMA0_HWIP][0] == IP_VERSION(5, 2,… in sdma_v5_2_update_medium_grain_light_sleep()
1623 /* Enable sdma mem light sleep */ in sdma_v5_2_update_medium_grain_light_sleep()
1630 /* Disable sdma mem light sleep */ in sdma_v5_2_update_medium_grain_light_sleep()
1753 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_set_ring_funcs()
1754 adev->sdma.instance[i].ring.funcs = &sdma_v5_2_ring_funcs; in sdma_v5_2_set_ring_funcs()
1755 adev->sdma.instance[i].ring.me = i; in sdma_v5_2_set_ring_funcs()
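
The set_ring_funcs hits install one shared function table on every ring and record the engine index in ring.me, which the shared callbacks later use to derive per-instance register offsets (as sdma_v5_2_get_reg_offset() does in the hits above). A sketch of that pattern with invented ops and register constants:

#include <stdio.h>

#define FAKE_NUM_INSTANCES 4
#define FAKE_REG_BASE      0x1000u
#define FAKE_REG_STRIDE    0x200u

struct fake_ring;

/* One shared ops table; callbacks read ring->me to find their engine. */
struct fake_ring_funcs {
    unsigned int (*rptr_reg)(const struct fake_ring *ring);
};

struct fake_ring {
    const struct fake_ring_funcs *funcs;
    unsigned int me;                    /* which SDMA engine this ring sits on */
};

static unsigned int fake_rptr_reg(const struct fake_ring *ring)
{
    /* Per-instance register offset, in the spirit of sdma_v5_2_get_reg_offset() */
    return FAKE_REG_BASE + ring->me * FAKE_REG_STRIDE;
}

static const struct fake_ring_funcs fake_funcs = {
    .rptr_reg = fake_rptr_reg,
};

int main(void)
{
    struct fake_ring rings[FAKE_NUM_INSTANCES];
    unsigned int i;

    for (i = 0; i < FAKE_NUM_INSTANCES; i++) {
        rings[i].funcs = &fake_funcs;   /* same table for every instance */
        rings[i].me = i;                /* per-ring engine index */
    }
    printf("instance 3 rptr reg: 0x%x\n", rings[3].funcs->rptr_reg(&rings[3]));
    return 0;
}
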
1770 adev->sdma.trap_irq.num_types = AMDGPU_SDMA_IRQ_INSTANCE0 + in sdma_v5_2_set_irq_funcs()
1771 adev->sdma.num_instances; in sdma_v5_2_set_irq_funcs()
1772 adev->sdma.trap_irq.funcs = &sdma_v5_2_trap_irq_funcs; in sdma_v5_2_set_irq_funcs()
1773 adev->sdma.illegal_inst_irq.funcs = &sdma_v5_2_illegal_inst_irq_funcs; in sdma_v5_2_set_irq_funcs()
1777 * sdma_v5_2_emit_copy_buffer - copy buffer using the sDMA engine
1807 * sdma_v5_2_emit_fill_buffer - fill buffer using the sDMA engine
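
The two kernel-doc hits above name the buffer-funcs entry points: each emits one SDMA packet into an indirect buffer, a linear copy with source and destination GPU addresses plus a byte count, or a constant fill with a destination, pattern, and size. The sketch below shows the general shape of such emit helpers; the opcodes and dword layout are placeholders, not the real SDMA 5.2 packet format:

#include <stdint.h>
#include <stdio.h>

/* Placeholder opcodes; the real SDMA packet encodings differ. */
#define FAKE_OP_COPY_LINEAR  0x01u
#define FAKE_OP_CONST_FILL   0x0bu

struct fake_ib {
    uint32_t dw[64];
    unsigned int len;                   /* dwords used so far */
};

static void ib_put(struct fake_ib *ib, uint32_t v)
{
    ib->dw[ib->len++] = v;
}

/* Emit one "copy src -> dst, byte_count bytes" packet into the IB. */
static void emit_copy_buffer(struct fake_ib *ib, uint64_t src, uint64_t dst,
                             uint32_t byte_count)
{
    ib_put(ib, FAKE_OP_COPY_LINEAR);
    ib_put(ib, byte_count);
    ib_put(ib, (uint32_t)src);
    ib_put(ib, (uint32_t)(src >> 32));
    ib_put(ib, (uint32_t)dst);
    ib_put(ib, (uint32_t)(dst >> 32));
}

/* Emit one "fill dst with a 32-bit pattern, byte_count bytes" packet. */
static void emit_fill_buffer(struct fake_ib *ib, uint32_t pattern, uint64_t dst,
                             uint32_t byte_count)
{
    ib_put(ib, FAKE_OP_CONST_FILL);
    ib_put(ib, (uint32_t)dst);
    ib_put(ib, (uint32_t)(dst >> 32));
    ib_put(ib, pattern);
    ib_put(ib, byte_count);
}

int main(void)
{
    struct fake_ib ib = { .len = 0 };

    emit_copy_buffer(&ib, 0x100000, 0x200000, 4096);
    emit_fill_buffer(&ib, 0, 0x300000, 4096);
    printf("ib uses %u dwords\n", ib.len);
    return 0;
}
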
1842 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v5_2_set_buffer_funcs()
1859 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_set_vm_pte_funcs()
1861 &adev->sdma.instance[i].ring.sched; in sdma_v5_2_set_vm_pte_funcs()
1863 adev->vm_manager.vm_pte_num_scheds = adev->sdma.num_instances; in sdma_v5_2_set_vm_pte_funcs()
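
The set_vm_pte_funcs hits register one GPU scheduler per SDMA instance with the VM manager so page-table update jobs can be spread across all engines. A sketch of collecting those scheduler pointers, with simplified stand-ins for struct drm_gpu_scheduler and the vm_manager fields:

#include <stdio.h>

#define MAX_SDMA_INSTANCES 4

/* Simplified stand-ins for struct drm_gpu_scheduler and friends. */
struct fake_sched {
    int id;
};

struct fake_instance {
    struct fake_sched sched;
};

struct fake_vm_manager {
    struct fake_sched *vm_pte_scheds[MAX_SDMA_INSTANCES];
    unsigned int vm_pte_num_scheds;
};

/* Register one scheduler per SDMA instance for page-table work. */
static void set_vm_pte_scheds(struct fake_vm_manager *vm,
                              struct fake_instance *inst, unsigned int n)
{
    unsigned int i;

    for (i = 0; i < n && i < MAX_SDMA_INSTANCES; i++)
        vm->vm_pte_scheds[i] = &inst[i].sched;
    vm->vm_pte_num_scheds = i;
}

int main(void)
{
    struct fake_instance inst[MAX_SDMA_INSTANCES] = {
        { { 0 } }, { { 1 } }, { { 2 } }, { { 3 } }
    };
    struct fake_vm_manager vm;

    set_vm_pte_scheds(&vm, inst, MAX_SDMA_INSTANCES);
    printf("%u PTE schedulers registered\n", vm.vm_pte_num_scheds);
    return 0;
}
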