Lines matching refs:vcn in drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c (the leading number on each hit is its source line; the trailing name is the enclosing function)
71 adev->vcn.num_vcn_inst = 1; in vcn_v2_0_early_init()
73 adev->vcn.num_enc_rings = 1; in vcn_v2_0_early_init()
75 adev->vcn.num_enc_rings = 2; in vcn_v2_0_early_init()
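The two num_enc_rings assignments at source lines 73 and 75 are opposite arms of a conditional that the match output elides. A minimal sketch of the likely shape, assuming the SR-IOV test (amdgpu_sriov_vf()) used throughout this file; virtual-function setups get a single encode ring, bare metal gets two:

    /* Sketch, not verbatim from the file. */
    adev->vcn.num_vcn_inst = 1;         /* VCN 2.0 exposes one instance */
    if (amdgpu_sriov_vf(adev))
        adev->vcn.num_enc_rings = 1;
    else
        adev->vcn.num_enc_rings = 2;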
101 &adev->vcn.inst->irq); in vcn_v2_0_sw_init()
106 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v2_0_sw_init()
109 &adev->vcn.inst->irq); in vcn_v2_0_sw_init()
120 hdr = (const struct common_firmware_header *)adev->vcn.fw->data; in vcn_v2_0_sw_init()
122 adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].fw = adev->vcn.fw; in vcn_v2_0_sw_init()
132 ring = &adev->vcn.inst->ring_dec; in vcn_v2_0_sw_init()
135 ring->doorbell_index = adev->doorbell_index.vcn.vcn_ring0_1 << 1; in vcn_v2_0_sw_init()
138 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0, in vcn_v2_0_sw_init()
143 adev->vcn.internal.context_id = mmUVD_CONTEXT_ID_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
144 adev->vcn.internal.ib_vmid = mmUVD_LMI_RBC_IB_VMID_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
145 adev->vcn.internal.ib_bar_low = mmUVD_LMI_RBC_IB_64BIT_BAR_LOW_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
146 adev->vcn.internal.ib_bar_high = mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
147 adev->vcn.internal.ib_size = mmUVD_RBC_IB_SIZE_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
148 adev->vcn.internal.gp_scratch8 = mmUVD_GP_SCRATCH8_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
150 adev->vcn.internal.scratch9 = mmUVD_SCRATCH9_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
151 adev->vcn.inst->external.scratch9 = SOC15_REG_OFFSET(UVD, 0, mmUVD_SCRATCH9); in vcn_v2_0_sw_init()
152 adev->vcn.internal.data0 = mmUVD_GPCOM_VCPU_DATA0_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
153 adev->vcn.inst->external.data0 = SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA0); in vcn_v2_0_sw_init()
154 adev->vcn.internal.data1 = mmUVD_GPCOM_VCPU_DATA1_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
155 adev->vcn.inst->external.data1 = SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA1); in vcn_v2_0_sw_init()
156 adev->vcn.internal.cmd = mmUVD_GPCOM_VCPU_CMD_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
157 adev->vcn.inst->external.cmd = SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD); in vcn_v2_0_sw_init()
158 adev->vcn.internal.nop = mmUVD_NO_OP_INTERNAL_OFFSET; in vcn_v2_0_sw_init()
159 adev->vcn.inst->external.nop = SOC15_REG_OFFSET(UVD, 0, mmUVD_NO_OP); in vcn_v2_0_sw_init()
161 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v2_0_sw_init()
162 ring = &adev->vcn.inst->ring_enc[i]; in vcn_v2_0_sw_init()
165 ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 2 + i; in vcn_v2_0_sw_init()
167 ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1 + i; in vcn_v2_0_sw_init()
169 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0, in vcn_v2_0_sw_init()
175 adev->vcn.pause_dpg_mode = vcn_v2_0_pause_dpg_mode; in vcn_v2_0_sw_init()
181 fw_shared = adev->vcn.inst->fw_shared_cpu_addr; in vcn_v2_0_sw_init()
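The sw_init hits above record two views of each UVD register: an "internal" offset (mmUVD_*_INTERNAL_OFFSET), which the VCN firmware resolves when it parses PACKET0 writes on the decode ring, and an "external" SOC15 MMIO offset for direct RREG32/WREG32 access from the host. The doorbell math at lines 135, 165 and 167 then packs the rings into adjacent doorbell slots: decode at the base, encode ring i at base + 2 + i on one arm and base + 1 + i on the other (presumably the bare-metal and SR-IOV layouts). A standalone sketch of that arithmetic, with 0xF8 as a purely hypothetical vcn_ring0_1 value:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint32_t vcn_ring0_1 = 0xF8;       /* hypothetical doorbell index */
        uint32_t base = vcn_ring0_1 << 1;  /* line 135: decode slot */

        printf("dec    doorbell: 0x%x\n", base);
        for (uint32_t i = 0; i < 2; i++)   /* line 165 layout: base+1 left unused */
            printf("enc[%u] doorbell: 0x%x\n", i, base + 2 + i);
        return 0;
    }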
197 volatile struct amdgpu_fw_shared *fw_shared = adev->vcn.inst->fw_shared_cpu_addr; in vcn_v2_0_sw_fini()
222 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; in vcn_v2_0_hw_init()
239 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v2_0_hw_init()
240 ring = &adev->vcn.inst->ring_enc[i]; in vcn_v2_0_hw_init()
265 cancel_delayed_work_sync(&adev->vcn.idle_work); in vcn_v2_0_hw_fini()
268 (adev->vcn.cur_state != AMD_PG_STATE_GATE && in vcn_v2_0_hw_fini()
326 uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4); in vcn_v2_0_mc_resume()
342 lower_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v2_0_mc_resume()
344 upper_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v2_0_mc_resume()
354 lower_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v2_0_mc_resume()
356 upper_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v2_0_mc_resume()
362 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_0_mc_resume()
364 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_0_mc_resume()
370 lower_32_bits(adev->vcn.inst->fw_shared_gpu_addr)); in vcn_v2_0_mc_resume()
372 upper_32_bits(adev->vcn.inst->fw_shared_gpu_addr)); in vcn_v2_0_mc_resume()
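mc_resume programs three regions of the instance's backing object into paired 64-bit BAR registers: the firmware image at the base (lines 342/344), the stack at base + offset (354/356), the context at base + offset + AMDGPU_VCN_STACK_SIZE (362/364), plus the separately allocated fw_shared buffer (370/372). Each 64-bit GPU address is split across a LOW/HIGH register pair. A standalone sketch of the split, with macros in the same shape as the kernel helpers of the same names:

    #include <stdio.h>
    #include <stdint.h>

    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffffU))
    #define upper_32_bits(n) ((uint32_t)((n) >> 32))

    int main(void)
    {
        uint64_t gpu_addr = 0x0000008000400000ULL; /* hypothetical BO address */

        printf("BAR_LOW:  0x%08x\n", lower_32_bits(gpu_addr)); /* -> *_64BIT_BAR_LOW */
        printf("BAR_HIGH: 0x%08x\n", upper_32_bits(gpu_addr)); /* -> *_64BIT_BAR_HIGH */
        return 0;
    }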
382 uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4); in vcn_v2_0_mc_resume_dpg_mode()
408 lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
411 upper_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
429 lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
432 upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
449 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
452 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
461 lower_32_bits(adev->vcn.inst->fw_shared_gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
464 upper_32_bits(adev->vcn.inst->fw_shared_gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
790 volatile struct amdgpu_fw_shared *fw_shared = adev->vcn.inst->fw_shared_cpu_addr; in vcn_v2_0_start_dpg_mode()
791 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; in vcn_v2_0_start_dpg_mode()
803 adev->vcn.inst->dpg_sram_curr_addr = (uint32_t *)adev->vcn.inst->dpg_sram_cpu_addr; in vcn_v2_0_start_dpg_mode()
877 psp_update_vcn_sram(adev, 0, adev->vcn.inst->dpg_sram_gpu_addr, in vcn_v2_0_start_dpg_mode()
878 (uint32_t)((uintptr_t)adev->vcn.inst->dpg_sram_curr_addr - in vcn_v2_0_start_dpg_mode()
879 (uintptr_t)adev->vcn.inst->dpg_sram_cpu_addr)); in vcn_v2_0_start_dpg_mode()
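In start_dpg_mode the register writes are not issued directly: line 803 points a write cursor at the DPG scratch SRAM, the *_dpg_mode helpers append address/value pairs behind it, and lines 877-879 hand PSP the staged buffer. The size argument is the cursor-minus-base difference with both pointers cast to uintptr_t first, so the result is a byte count; subtracting the uint32_t pointers directly would yield dwords instead. A standalone sketch of that arithmetic:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint32_t sram[64];        /* stand-in for dpg_sram_cpu_addr */
        uint32_t *curr = sram;    /* stand-in for dpg_sram_curr_addr */

        for (int i = 0; i < 3; i++) {  /* stage 3 hypothetical reg writes */
            *curr++ = 0x1000 + i;      /* register */
            *curr++ = 0xdeadbeef;      /* value */
        }

        /* Casting to uintptr_t before subtracting gives bytes, which is
         * what psp_update_vcn_sram() receives in the listing. */
        uint32_t bytes = (uint32_t)((uintptr_t)curr - (uintptr_t)sram);
        printf("staged %u bytes (%u dwords)\n", bytes, bytes / 4);
        return 0;
    }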
927 volatile struct amdgpu_fw_shared *fw_shared = adev->vcn.inst->fw_shared_cpu_addr; in vcn_v2_0_start()
928 struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec; in vcn_v2_0_start()
937 return vcn_v2_0_start_dpg_mode(adev, adev->vcn.indirect_sram); in vcn_v2_0_start()
1078 ring = &adev->vcn.inst->ring_enc[0]; in vcn_v2_0_start()
1087 ring = &adev->vcn.inst->ring_enc[1]; in vcn_v2_0_start()
1202 if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) { in vcn_v2_0_pause_dpg_mode()
1204 adev->vcn.inst[inst_idx].pause_state.fw_based, new_state->fw_based); in vcn_v2_0_pause_dpg_mode()
1213 volatile struct amdgpu_fw_shared *fw_shared = adev->vcn.inst->fw_shared_cpu_addr; in vcn_v2_0_pause_dpg_mode()
1229 ring = &adev->vcn.inst->ring_enc[0]; in vcn_v2_0_pause_dpg_mode()
1239 ring = &adev->vcn.inst->ring_enc[1]; in vcn_v2_0_pause_dpg_mode()
1265 adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based; in vcn_v2_0_pause_dpg_mode()
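pause_dpg_mode is a guarded state machine: line 1202 compares the cached per-instance pause_state.fw_based against the requested state, the body reprograms the pause controls and reinitializes the encode rings' pointers (lines 1229/1239), and line 1265 caches the new state so a repeated request becomes a no-op. A minimal sketch of that guard, body elided:

    /* Sketch of the transition guard implied by lines 1202 and 1265. */
    if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) {
        /* ... program the DPG pause register and, on the unpause path,
         *     restore ring_enc[0]/ring_enc[1] read/write pointers ... */
        adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based;
    }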
1375 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0)); in vcn_v2_0_dec_ring_insert_start()
1377 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0)); in vcn_v2_0_dec_ring_insert_start()
1392 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0)); in vcn_v2_0_dec_ring_insert_end()
1412 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.nop, 0)); in vcn_v2_0_dec_ring_insert_nop()
1433 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.context_id, 0)); in vcn_v2_0_dec_ring_emit_fence()
1436 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0)); in vcn_v2_0_dec_ring_emit_fence()
1439 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0)); in vcn_v2_0_dec_ring_emit_fence()
1442 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0)); in vcn_v2_0_dec_ring_emit_fence()
1445 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0)); in vcn_v2_0_dec_ring_emit_fence()
1448 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0)); in vcn_v2_0_dec_ring_emit_fence()
1451 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0)); in vcn_v2_0_dec_ring_emit_fence()
1474 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_vmid, 0)); in vcn_v2_0_dec_ring_emit_ib()
1477 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_bar_low, 0)); in vcn_v2_0_dec_ring_emit_ib()
1479 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_bar_high, 0)); in vcn_v2_0_dec_ring_emit_ib()
1481 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_size, 0)); in vcn_v2_0_dec_ring_emit_ib()
1490 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0)); in vcn_v2_0_dec_ring_emit_reg_wait()
1493 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0)); in vcn_v2_0_dec_ring_emit_reg_wait()
1496 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.gp_scratch8, 0)); in vcn_v2_0_dec_ring_emit_reg_wait()
1499 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0)); in vcn_v2_0_dec_ring_emit_reg_wait()
1524 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0)); in vcn_v2_0_dec_ring_emit_wreg()
1527 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0)); in vcn_v2_0_dec_ring_emit_wreg()
1530 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0)); in vcn_v2_0_dec_ring_emit_wreg()
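All of the decode-ring helpers above speak one three-register protocol: a PACKET0 header naming internal.data0 is followed by the target register, one naming internal.data1 by the value, and one naming internal.cmd by the operation; the firmware decodes the internal offsets on its side. A sketch of an emit_wreg-style helper in that shape (the command encoding, VCN_DEC_CMD_WRITE_REG << 1, is an assumption, not taken from the listing):

    /* Sketch: write `val` to register `reg` from the decode ring. */
    static void dec_ring_emit_wreg_sketch(struct amdgpu_ring *ring,
                                          uint32_t reg, uint32_t val)
    {
        struct amdgpu_device *adev = ring->adev;

        amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0));
        amdgpu_ring_write(ring, reg << 2);       /* register, as byte offset */
        amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0));
        amdgpu_ring_write(ring, val);            /* value */
        amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
        amdgpu_ring_write(ring, VCN_DEC_CMD_WRITE_REG << 1); /* assumed cmd */
    }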
1546 if (ring == &adev->vcn.inst->ring_enc[0]) in vcn_v2_0_enc_ring_get_rptr()
1563 if (ring == &adev->vcn.inst->ring_enc[0]) { in vcn_v2_0_enc_ring_get_wptr()
1587 if (ring == &adev->vcn.inst->ring_enc[0]) { in vcn_v2_0_enc_ring_set_wptr()
1700 amdgpu_fence_process(&adev->vcn.inst->ring_dec); in vcn_v2_0_process_interrupt()
1703 amdgpu_fence_process(&adev->vcn.inst->ring_enc[0]); in vcn_v2_0_process_interrupt()
1706 amdgpu_fence_process(&adev->vcn.inst->ring_enc[1]); in vcn_v2_0_process_interrupt()
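process_interrupt fans each VCN interrupt out to the matching ring's fence handling; the selector, elided by the match, is the interrupt source id, one per ring. A sketch of that dispatch; the VCN_2_0__SRCID__* names are assumptions about the ids, not taken from the listing:

    /* Sketch of the dispatch implied by lines 1700-1706. */
    switch (entry->src_id) {
    case VCN_2_0__SRCID__UVD_SYSTEM_MESSAGE_INTERRUPT:   /* assumed id */
        amdgpu_fence_process(&adev->vcn.inst->ring_dec);
        break;
    case VCN_2_0__SRCID__UVD_ENC_GENERAL_PURPOSE:        /* assumed id */
        amdgpu_fence_process(&adev->vcn.inst->ring_enc[0]);
        break;
    case VCN_2_0__SRCID__UVD_ENC_LOW_LATENCY:            /* assumed id */
        amdgpu_fence_process(&adev->vcn.inst->ring_enc[1]);
        break;
    }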
1727 WREG32(adev->vcn.inst[ring->me].external.scratch9, 0xCAFEDEAD); in vcn_v2_0_dec_ring_test_ring()
1731 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0)); in vcn_v2_0_dec_ring_test_ring()
1733 amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.scratch9, 0)); in vcn_v2_0_dec_ring_test_ring()
1737 tmp = RREG32(adev->vcn.inst[ring->me].external.scratch9); in vcn_v2_0_dec_ring_test_ring()
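The decode ring test closes the loop between the two register views: line 1727 seeds the external scratch9 over MMIO with 0xCAFEDEAD, lines 1731/1733 ask the ring itself to overwrite it through the internal offset, and line 1737 polls the external register until the new value lands. A condensed sketch, with the command encoding, the written pattern (0xDEADBEEF) and the timeout handling as assumptions:

    /* Sketch of the round-trip test implied by lines 1727-1737. */
    WREG32(adev->vcn.inst[ring->me].external.scratch9, 0xCAFEDEAD);

    amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
    amdgpu_ring_write(ring, VCN_DEC_CMD_PACKET_START << 1); /* assumed cmd */
    amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.scratch9, 0));
    amdgpu_ring_write(ring, 0xDEADBEEF);                    /* assumed pattern */
    amdgpu_ring_commit(ring);

    for (i = 0; i < adev->usec_timeout; i++) {
        if (RREG32(adev->vcn.inst[ring->me].external.scratch9) == 0xDEADBEEF)
            break;                        /* the ring executed the write */
        udelay(1);
    }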
1764 adev->vcn.cur_state = AMD_PG_STATE_UNGATE; in vcn_v2_0_set_powergating_state()
1768 if (state == adev->vcn.cur_state) in vcn_v2_0_set_powergating_state()
1777 adev->vcn.cur_state = state; in vcn_v2_0_set_powergating_state()
1812 adev->vcn.inst->ring_dec.wptr = 0; in vcn_v2_0_start_mmsch()
1813 adev->vcn.inst->ring_dec.wptr_old = 0; in vcn_v2_0_start_mmsch()
1814 vcn_v2_0_dec_ring_set_wptr(&adev->vcn.inst->ring_dec); in vcn_v2_0_start_mmsch()
1816 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v2_0_start_mmsch()
1817 adev->vcn.inst->ring_enc[i].wptr = 0; in vcn_v2_0_start_mmsch()
1818 adev->vcn.inst->ring_enc[i].wptr_old = 0; in vcn_v2_0_start_mmsch()
1819 vcn_v2_0_enc_ring_set_wptr(&adev->vcn.inst->ring_enc[i]); in vcn_v2_0_start_mmsch()
1874 size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4); in vcn_v2_0_start_sriov()
1896 lower_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v2_0_start_sriov()
1900 upper_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v2_0_start_sriov()
1914 lower_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v2_0_start_sriov()
1918 upper_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v2_0_start_sriov()
1929 lower_32_bits(adev->vcn.inst->gpu_addr + offset + in vcn_v2_0_start_sriov()
1934 upper_32_bits(adev->vcn.inst->gpu_addr + offset + in vcn_v2_0_start_sriov()
1943 for (r = 0; r < adev->vcn.num_enc_rings; ++r) { in vcn_v2_0_start_sriov()
1944 ring = &adev->vcn.inst->ring_enc[r]; in vcn_v2_0_start_sriov()
1957 ring = &adev->vcn.inst->ring_dec; in vcn_v2_0_start_sriov()
2069 adev->vcn.inst->ring_dec.funcs = &vcn_v2_0_dec_ring_vm_funcs; in vcn_v2_0_set_dec_ring_funcs()
2077 for (i = 0; i < adev->vcn.num_enc_rings; ++i) in vcn_v2_0_set_enc_ring_funcs()
2078 adev->vcn.inst->ring_enc[i].funcs = &vcn_v2_0_enc_ring_vm_funcs; in vcn_v2_0_set_enc_ring_funcs()
2090 adev->vcn.inst->irq.num_types = adev->vcn.num_enc_rings + 1; in vcn_v2_0_set_irq_funcs()
2091 adev->vcn.inst->irq.funcs = &vcn_v2_0_irq_funcs; in vcn_v2_0_set_irq_funcs()