Lines Matching refs:vcn

79 adev->vcn.num_vcn_inst = 2; in vcn_v2_5_early_init()
80 adev->vcn.harvest_config = 0; in vcn_v2_5_early_init()
81 adev->vcn.num_enc_rings = 1; in vcn_v2_5_early_init()
85 adev->vcn.num_vcn_inst = VCN25_MAX_HW_INSTANCES_ARCTURUS; in vcn_v2_5_early_init()
86 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v2_5_early_init()
89 adev->vcn.harvest_config |= 1 << i; in vcn_v2_5_early_init()
91 if (adev->vcn.harvest_config == (AMDGPU_VCN_HARVEST_VCN0 | in vcn_v2_5_early_init()
96 adev->vcn.num_enc_rings = 2; in vcn_v2_5_early_init()
119 for (j = 0; j < adev->vcn.num_vcn_inst; j++) { in vcn_v2_5_sw_init()
120 if (adev->vcn.harvest_config & (1 << j)) in vcn_v2_5_sw_init()
124 VCN_2_0__SRCID__UVD_SYSTEM_MESSAGE_INTERRUPT, &adev->vcn.inst[j].irq); in vcn_v2_5_sw_init()
129 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v2_5_sw_init()
131 i + VCN_2_0__SRCID__UVD_ENC_GENERAL_PURPOSE, &adev->vcn.inst[j].irq); in vcn_v2_5_sw_init()
143 hdr = (const struct common_firmware_header *)adev->vcn.fw->data; in vcn_v2_5_sw_init()
145 adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].fw = adev->vcn.fw; in vcn_v2_5_sw_init()
149 if (adev->vcn.num_vcn_inst == VCN25_MAX_HW_INSTANCES_ARCTURUS) { in vcn_v2_5_sw_init()
151 adev->firmware.ucode[AMDGPU_UCODE_ID_VCN1].fw = adev->vcn.fw; in vcn_v2_5_sw_init()
162 for (j = 0; j < adev->vcn.num_vcn_inst; j++) { in vcn_v2_5_sw_init()
165 if (adev->vcn.harvest_config & (1 << j)) in vcn_v2_5_sw_init()
167 adev->vcn.internal.context_id = mmUVD_CONTEXT_ID_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
168 adev->vcn.internal.ib_vmid = mmUVD_LMI_RBC_IB_VMID_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
169 adev->vcn.internal.ib_bar_low = mmUVD_LMI_RBC_IB_64BIT_BAR_LOW_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
170 adev->vcn.internal.ib_bar_high = mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
171 adev->vcn.internal.ib_size = mmUVD_RBC_IB_SIZE_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
172 adev->vcn.internal.gp_scratch8 = mmUVD_GP_SCRATCH8_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
174 adev->vcn.internal.scratch9 = mmUVD_SCRATCH9_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
175 adev->vcn.inst[j].external.scratch9 = SOC15_REG_OFFSET(VCN, j, mmUVD_SCRATCH9); in vcn_v2_5_sw_init()
176 adev->vcn.internal.data0 = mmUVD_GPCOM_VCPU_DATA0_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
177 adev->vcn.inst[j].external.data0 = SOC15_REG_OFFSET(VCN, j, mmUVD_GPCOM_VCPU_DATA0); in vcn_v2_5_sw_init()
178 adev->vcn.internal.data1 = mmUVD_GPCOM_VCPU_DATA1_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
179 adev->vcn.inst[j].external.data1 = SOC15_REG_OFFSET(VCN, j, mmUVD_GPCOM_VCPU_DATA1); in vcn_v2_5_sw_init()
180 adev->vcn.internal.cmd = mmUVD_GPCOM_VCPU_CMD_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
181 adev->vcn.inst[j].external.cmd = SOC15_REG_OFFSET(VCN, j, mmUVD_GPCOM_VCPU_CMD); in vcn_v2_5_sw_init()
182 adev->vcn.internal.nop = mmUVD_NO_OP_INTERNAL_OFFSET; in vcn_v2_5_sw_init()
183 adev->vcn.inst[j].external.nop = SOC15_REG_OFFSET(VCN, j, mmUVD_NO_OP); in vcn_v2_5_sw_init()
185 ring = &adev->vcn.inst[j].ring_dec; in vcn_v2_5_sw_init()
188 ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + in vcn_v2_5_sw_init()
191 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[j].irq, in vcn_v2_5_sw_init()
196 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v2_5_sw_init()
197 ring = &adev->vcn.inst[j].ring_enc[i]; in vcn_v2_5_sw_init()
200 ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + in vcn_v2_5_sw_init()
205 &adev->vcn.inst[j].irq, 0, in vcn_v2_5_sw_init()
211 fw_shared = adev->vcn.inst[j].fw_shared_cpu_addr; in vcn_v2_5_sw_init()
222 adev->vcn.pause_dpg_mode = vcn_v2_5_pause_dpg_mode; in vcn_v2_5_sw_init()
240 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v2_5_sw_fini()
241 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_sw_fini()
243 fw_shared = adev->vcn.inst[i].fw_shared_cpu_addr; in vcn_v2_5_sw_fini()
275 for (j = 0; j < adev->vcn.num_vcn_inst; ++j) { in vcn_v2_5_hw_init()
276 if (adev->vcn.harvest_config & (1 << j)) in vcn_v2_5_hw_init()
280 adev->vcn.inst[j].ring_enc[0].sched.ready = true; in vcn_v2_5_hw_init()
281 adev->vcn.inst[j].ring_enc[1].sched.ready = false; in vcn_v2_5_hw_init()
282 adev->vcn.inst[j].ring_enc[2].sched.ready = false; in vcn_v2_5_hw_init()
283 adev->vcn.inst[j].ring_dec.sched.ready = true; in vcn_v2_5_hw_init()
286 ring = &adev->vcn.inst[j].ring_dec; in vcn_v2_5_hw_init()
295 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v2_5_hw_init()
296 ring = &adev->vcn.inst[j].ring_enc[i]; in vcn_v2_5_hw_init()
324 cancel_delayed_work_sync(&adev->vcn.idle_work); in vcn_v2_5_hw_fini()
326 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_hw_fini()
327 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_hw_fini()
331 (adev->vcn.cur_state != AMD_PG_STATE_GATE && in vcn_v2_5_hw_fini()
390 uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4); in vcn_v2_5_mc_resume()
394 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_mc_resume()
395 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_mc_resume()
407 lower_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_mc_resume()
409 upper_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_mc_resume()
418 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_mc_resume()
420 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_mc_resume()
426 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_5_mc_resume()
428 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_5_mc_resume()
434 lower_32_bits(adev->vcn.inst[i].fw_shared_gpu_addr)); in vcn_v2_5_mc_resume()
436 upper_32_bits(adev->vcn.inst[i].fw_shared_gpu_addr)); in vcn_v2_5_mc_resume()
445 uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4); in vcn_v2_5_mc_resume_dpg_mode()
471 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
474 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
492 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
495 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
512 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
515 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
524 lower_32_bits(adev->vcn.inst[inst_idx].fw_shared_gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
527 upper_32_bits(adev->vcn.inst[inst_idx].fw_shared_gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
551 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_disable_clock_gating()
552 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_disable_clock_gating()
716 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_enable_clock_gating()
717 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_enable_clock_gating()
768 volatile struct amdgpu_fw_shared *fw_shared = adev->vcn.inst[inst_idx].fw_shared_cpu_addr; in vcn_v2_5_start_dpg_mode()
782 adev->vcn.inst[inst_idx].dpg_sram_curr_addr = (uint32_t *)adev->vcn.inst[inst_idx].dpg_sram_cpu_addr; in vcn_v2_5_start_dpg_mode()
860 psp_update_vcn_sram(adev, inst_idx, adev->vcn.inst[inst_idx].dpg_sram_gpu_addr, in vcn_v2_5_start_dpg_mode()
861 (uint32_t)((uintptr_t)adev->vcn.inst[inst_idx].dpg_sram_curr_addr - in vcn_v2_5_start_dpg_mode()
862 (uintptr_t)adev->vcn.inst[inst_idx].dpg_sram_cpu_addr)); in vcn_v2_5_start_dpg_mode()
864 ring = &adev->vcn.inst[inst_idx].ring_dec; in vcn_v2_5_start_dpg_mode()
919 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_start()
920 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_start()
923 r = vcn_v2_5_start_dpg_mode(adev, i, adev->vcn.indirect_sram); in vcn_v2_5_start()
942 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_start()
943 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_start()
991 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_start()
992 volatile struct amdgpu_fw_shared *fw_shared = adev->vcn.inst[i].fw_shared_cpu_addr; in vcn_v2_5_start()
993 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_start()
1056 ring = &adev->vcn.inst[i].ring_dec; in vcn_v2_5_start()
1082 ring = &adev->vcn.inst[i].ring_enc[0]; in vcn_v2_5_start()
1091 ring = &adev->vcn.inst[i].ring_enc[1]; in vcn_v2_5_start()
1178 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_sriov_start()
1189 size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4); in vcn_v2_5_sriov_start()
1207 lower_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_sriov_start()
1211 upper_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_sriov_start()
1224 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_sriov_start()
1228 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_sriov_start()
1238 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset + in vcn_v2_5_sriov_start()
1243 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset + in vcn_v2_5_sriov_start()
1252 ring = &adev->vcn.inst[i].ring_enc[0]; in vcn_v2_5_sriov_start()
1265 ring = &adev->vcn.inst[i].ring_dec; in vcn_v2_5_sriov_start()
1332 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_stop()
1333 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_stop()
1403 if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) { in vcn_v2_5_pause_dpg_mode()
1405 adev->vcn.inst[inst_idx].pause_state.fw_based, new_state->fw_based); in vcn_v2_5_pause_dpg_mode()
1414 volatile struct amdgpu_fw_shared *fw_shared = adev->vcn.inst[inst_idx].fw_shared_cpu_addr; in vcn_v2_5_pause_dpg_mode()
1432 ring = &adev->vcn.inst[inst_idx].ring_enc[0]; in vcn_v2_5_pause_dpg_mode()
1442 ring = &adev->vcn.inst[inst_idx].ring_enc[1]; in vcn_v2_5_pause_dpg_mode()
1464 adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based; in vcn_v2_5_pause_dpg_mode()
1591 if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) in vcn_v2_5_enc_ring_get_rptr()
1608 if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) { in vcn_v2_5_enc_ring_get_wptr()
1632 if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) { in vcn_v2_5_enc_ring_set_wptr()
1713 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_set_dec_ring_funcs()
1714 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_set_dec_ring_funcs()
1717 adev->vcn.inst[i].ring_dec.funcs = &vcn_v2_5_dec_ring_vm_funcs; in vcn_v2_5_set_dec_ring_funcs()
1719 adev->vcn.inst[i].ring_dec.funcs = &vcn_v2_6_dec_ring_vm_funcs; in vcn_v2_5_set_dec_ring_funcs()
1720 adev->vcn.inst[i].ring_dec.me = i; in vcn_v2_5_set_dec_ring_funcs()
1729 for (j = 0; j < adev->vcn.num_vcn_inst; ++j) { in vcn_v2_5_set_enc_ring_funcs()
1730 if (adev->vcn.harvest_config & (1 << j)) in vcn_v2_5_set_enc_ring_funcs()
1732 for (i = 0; i < adev->vcn.num_enc_rings; ++i) { in vcn_v2_5_set_enc_ring_funcs()
1734 adev->vcn.inst[j].ring_enc[i].funcs = &vcn_v2_5_enc_ring_vm_funcs; in vcn_v2_5_set_enc_ring_funcs()
1736 adev->vcn.inst[j].ring_enc[i].funcs = &vcn_v2_6_enc_ring_vm_funcs; in vcn_v2_5_set_enc_ring_funcs()
1737 adev->vcn.inst[j].ring_enc[i].me = j; in vcn_v2_5_set_enc_ring_funcs()
1748 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_is_idle()
1749 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_is_idle()
1762 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_wait_for_idle()
1763 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_wait_for_idle()
1803 if(state == adev->vcn.cur_state) in vcn_v2_5_set_powergating_state()
1812 adev->vcn.cur_state = state; in vcn_v2_5_set_powergating_state()
1847 amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_dec); in vcn_v2_5_process_interrupt()
1850 amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_enc[0]); in vcn_v2_5_process_interrupt()
1853 amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_enc[1]); in vcn_v2_5_process_interrupt()
1873 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_set_irq_funcs()
1874 if (adev->vcn.harvest_config & (1 << i)) in vcn_v2_5_set_irq_funcs()
1876 adev->vcn.inst[i].irq.num_types = adev->vcn.num_enc_rings + 1; in vcn_v2_5_set_irq_funcs()
1877 adev->vcn.inst[i].irq.funcs = &vcn_v2_5_irq_funcs; in vcn_v2_5_set_irq_funcs()
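Nearly every hit above follows the same per-instance idiom: loop over adev->vcn.num_vcn_inst and skip any instance whose bit is set in the adev->vcn.harvest_config mask, then work on the per-instance state under adev->vcn.inst[i]. A minimal standalone sketch of that idiom is below; the struct definitions are simplified stand-ins for the real ones in amdgpu_vcn.h, and only the field names seen in the hits above are kept.

    #include <stdio.h>

    /* Simplified stand-in for the amdgpu VCN bookkeeping referenced above. */
    struct vcn_inst {
        int dummy;                  /* real struct holds rings, irq, fw state */
    };

    struct vcn {
        unsigned int num_vcn_inst;  /* number of VCN hardware instances */
        unsigned int harvest_config;/* bit i set => instance i is harvested */
        struct vcn_inst inst[2];
    };

    int main(void)
    {
        /* Example: two instances, instance 0 fused off (harvested). */
        struct vcn vcn = { .num_vcn_inst = 2, .harvest_config = 1u << 0 };
        unsigned int i;

        for (i = 0; i < vcn.num_vcn_inst; ++i) {
            if (vcn.harvest_config & (1u << i))
                continue;           /* skip harvested instances, as the driver does */
            printf("VCN instance %u is active\n", i);
        }
        return 0;
    }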