Lines matching refs: uvd (all hits fall in uvd_v6_0.c, the amdgpu UVD 6.0 IP block)
67 (!adev->uvd.fw_version || adev->uvd.fw_version >= FW_1_130_16)); in uvd_v6_0_enc_support()
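Line 67 is the tail of the encode-support predicate. A minimal sketch of the whole check, assuming the Polaris-to-VegaM ASIC guard that this UVD generation pairs with the firmware-version test (the ASIC bounds are an assumption, not shown in the hit above):

    /* Encode rings need new-enough UVD firmware; a zero fw_version
     * means "unknown" and is treated as supported. */
    static bool uvd_v6_0_enc_support(struct amdgpu_device *adev)
    {
            return ((adev->asic_type >= CHIP_POLARIS10) &&
                    (adev->asic_type <= CHIP_VEGAM) &&
                    (!adev->uvd.fw_version ||
                     adev->uvd.fw_version >= FW_1_130_16));
    }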
95 if (ring == &adev->uvd.inst->ring_enc[0]) in uvd_v6_0_enc_ring_get_rptr()
125 if (ring == &adev->uvd.inst->ring_enc[0]) in uvd_v6_0_enc_ring_get_wptr()
156 if (ring == &adev->uvd.inst->ring_enc[0]) in uvd_v6_0_enc_ring_set_wptr()
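Lines 95, 125 and 156 all use the same dispatch: the first encode ring talks to the mmUVD_RB_* registers, the second to the mmUVD_RB_*2 bank. A sketch of the wptr pair under that convention:

    static uint64_t uvd_v6_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
    {
            struct amdgpu_device *adev = ring->adev;

            /* ring_enc[0] uses WPTR, ring_enc[1] uses WPTR2 */
            if (ring == &adev->uvd.inst->ring_enc[0])
                    return RREG32(mmUVD_RB_WPTR);
            else
                    return RREG32(mmUVD_RB_WPTR2);
    }

    static void uvd_v6_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
    {
            struct amdgpu_device *adev = ring->adev;

            if (ring == &adev->uvd.inst->ring_enc[0])
                    WREG32(mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
            else
                    WREG32(mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
    }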
371 adev->uvd.num_uvd_inst = 1; in uvd_v6_0_early_init()
380 adev->uvd.num_enc_rings = 2; in uvd_v6_0_early_init()
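Lines 371 and 380 establish the topology: UVD 6.0 has exactly one instance, and two encode rings only when uvd_v6_0_enc_support() says so. A condensed sketch of early init, with error paths and the harvest-fuse bail-out omitted:

    static int uvd_v6_0_early_init(void *handle)
    {
            struct amdgpu_device *adev = (struct amdgpu_device *)handle;

            adev->uvd.num_uvd_inst = 1;

            uvd_v6_0_set_ring_funcs(adev);

            if (uvd_v6_0_enc_support(adev)) {
                    adev->uvd.num_enc_rings = 2;
                    uvd_v6_0_set_enc_ring_funcs(adev);
            }

            uvd_v6_0_set_irq_funcs(adev);

            return 0;
    }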
396 r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_UVD_SYSTEM_MESSAGE, &adev->uvd.inst->irq); in uvd_v6_0_sw_init()
402 for (i = 0; i < adev->uvd.num_enc_rings; ++i) { in uvd_v6_0_sw_init()
403 r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, i + VISLANDS30_IV_SRCID_UVD_ENC_GEN_PURP, &adev->uvd.inst->irq); in uvd_v6_0_sw_init()
414 for (i = 0; i < adev->uvd.num_enc_rings; ++i) in uvd_v6_0_sw_init()
415 adev->uvd.inst->ring_enc[i].funcs = NULL; in uvd_v6_0_sw_init()
417 adev->uvd.inst->irq.num_types = 1; in uvd_v6_0_sw_init()
418 adev->uvd.num_enc_rings = 0; in uvd_v6_0_sw_init()
427 ring = &adev->uvd.inst->ring; in uvd_v6_0_sw_init()
429 r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0); in uvd_v6_0_sw_init()
434 for (i = 0; i < adev->uvd.num_enc_rings; ++i) { in uvd_v6_0_sw_init()
435 ring = &adev->uvd.inst->ring_enc[i]; in uvd_v6_0_sw_init()
437 r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0); in uvd_v6_0_sw_init()
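Lines 396 to 437 are the whole software-init flow: register one decode interrupt source plus one per encode ring, neuter the encode rings when support is absent (lines 414 to 418), then initialize every ring against the shared adev->uvd.inst->irq source. A condensed sketch, with the amdgpu_uvd_sw_init()/amdgpu_uvd_resume() calls and some error handling trimmed:

    static int uvd_v6_0_sw_init(void *handle)
    {
            struct amdgpu_device *adev = (struct amdgpu_device *)handle;
            struct amdgpu_ring *ring;
            int i, r;

            /* decode ("system message") interrupt */
            r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY,
                                  VISLANDS30_IV_SRCID_UVD_SYSTEM_MESSAGE,
                                  &adev->uvd.inst->irq);
            if (r)
                    return r;

            /* one encode interrupt per encode ring */
            for (i = 0; i < adev->uvd.num_enc_rings; ++i) {
                    r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY,
                                          i + VISLANDS30_IV_SRCID_UVD_ENC_GEN_PURP,
                                          &adev->uvd.inst->irq);
                    if (r)
                            return r;
            }

            if (!uvd_v6_0_enc_support(adev)) {
                    /* no encode: drop the ring funcs and the extra irq types */
                    for (i = 0; i < adev->uvd.num_enc_rings; ++i)
                            adev->uvd.inst->ring_enc[i].funcs = NULL;

                    adev->uvd.inst->irq.num_types = 1;
                    adev->uvd.num_enc_rings = 0;
            }

            ring = &adev->uvd.inst->ring;
            r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0);
            if (r)
                    return r;

            for (i = 0; i < adev->uvd.num_enc_rings; ++i) {
                    ring = &adev->uvd.inst->ring_enc[i];
                    r = amdgpu_ring_init(adev, ring, 512,
                                         &adev->uvd.inst->irq, 0);
                    if (r)
                            return r;
            }

            return 0;
    }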
458 for (i = 0; i < adev->uvd.num_enc_rings; ++i) in uvd_v6_0_sw_fini()
459 amdgpu_ring_fini(&adev->uvd.inst->ring_enc[i]); in uvd_v6_0_sw_fini()
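Lines 458 and 459 mirror that on teardown: only the encode rings need an explicit amdgpu_ring_fini(). A sketch, assuming the usual suspend-then-fini ordering:

    static int uvd_v6_0_sw_fini(void *handle)
    {
            int i, r;
            struct amdgpu_device *adev = (struct amdgpu_device *)handle;

            r = amdgpu_uvd_suspend(adev);
            if (r)
                    return r;

            for (i = 0; i < adev->uvd.num_enc_rings; ++i)
                    amdgpu_ring_fini(&adev->uvd.inst->ring_enc[i]);

            return amdgpu_uvd_sw_fini(adev);
    }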
475 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v6_0_hw_init()
518 for (i = 0; i < adev->uvd.num_enc_rings; ++i) { in uvd_v6_0_hw_init()
519 ring = &adev->uvd.inst->ring_enc[i]; in uvd_v6_0_hw_init()
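Lines 475 to 519 bring the rings up: the decode ring first, then each encode ring, each followed by a ring test. A trimmed sketch of that loop; the ring->ready flag and amdgpu_ring_test_ring() call are from the kernel vintage this listing matches (later kernels moved to ring->sched.ready and a test helper):

    ring = &adev->uvd.inst->ring;
    ring->ready = true;
    r = amdgpu_ring_test_ring(ring);
    if (r) {
            ring->ready = false;
            goto done;
    }

    for (i = 0; i < adev->uvd.num_enc_rings; ++i) {
            ring = &adev->uvd.inst->ring_enc[i];
            ring->ready = true;
            r = amdgpu_ring_test_ring(ring);
            if (r) {
                    ring->ready = false;
                    goto done;
            }
    }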
550 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v6_0_hw_fini()
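Line 550 grabs the decode ring on the way down; the function stops the block if it is still running and marks the ring not ready. A minimal sketch:

    static int uvd_v6_0_hw_fini(void *handle)
    {
            struct amdgpu_device *adev = (struct amdgpu_device *)handle;
            struct amdgpu_ring *ring = &adev->uvd.inst->ring;

            if (RREG32(mmUVD_STATUS) != 0)
                    uvd_v6_0_stop(adev);

            ring->ready = false;

            return 0;
    }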
598 lower_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
600 upper_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
614 (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles); in uvd_v6_0_mc_resume()
622 WREG32(mmUVD_GP_SCRATCH4, adev->uvd.max_handles); in uvd_v6_0_mc_resume()
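Lines 598 to 622 program the VCPU caches out of a single GPU allocation: firmware first, then heap, then the per-session stack area, which is why the third region scales with max_handles. A sketch of the layout math, using the offset/size macro names from amdgpu_uvd.h:

    /* base address of the UVD BO */
    WREG32(mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW,
           lower_32_bits(adev->uvd.inst->gpu_addr));
    WREG32(mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH,
           upper_32_bits(adev->uvd.inst->gpu_addr));

    offset = AMDGPU_UVD_FIRMWARE_OFFSET;
    size = AMDGPU_UVD_FIRMWARE_SIZE(adev);
    WREG32(mmUVD_VCPU_CACHE_OFFSET0, offset >> 3);
    WREG32(mmUVD_VCPU_CACHE_SIZE0, size);

    offset += size;
    size = AMDGPU_UVD_HEAP_SIZE;
    WREG32(mmUVD_VCPU_CACHE_OFFSET1, offset >> 3);
    WREG32(mmUVD_VCPU_CACHE_SIZE1, size);

    offset += size;
    size = AMDGPU_UVD_STACK_SIZE +
           (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles);
    WREG32(mmUVD_VCPU_CACHE_OFFSET2, offset >> 3);
    WREG32(mmUVD_VCPU_CACHE_SIZE2, size);

    /* tell the firmware how many session handles it may hand out */
    WREG32(mmUVD_GP_SCRATCH4, adev->uvd.max_handles);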
713 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v6_0_start()
853 ring = &adev->uvd.inst->ring_enc[0]; in uvd_v6_0_start()
860 ring = &adev->uvd.inst->ring_enc[1]; in uvd_v6_0_start()
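Lines 853 and 860 are the tail of uvd_v6_0_start(): once the decode ring is running, the two encode rings get their pointers, base and size programmed into the RB and RB2 register banks, matching the get/set_wptr split above. A sketch:

    ring = &adev->uvd.inst->ring_enc[0];
    WREG32(mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
    WREG32(mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
    WREG32(mmUVD_RB_BASE_LO, ring->gpu_addr);
    WREG32(mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
    WREG32(mmUVD_RB_SIZE, ring->ring_size / 4);

    ring = &adev->uvd.inst->ring_enc[1];
    WREG32(mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
    WREG32(mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
    WREG32(mmUVD_RB_BASE_LO2, ring->gpu_addr);
    WREG32(mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
    WREG32(mmUVD_RB_SIZE2, ring->ring_size / 4);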
1157 adev->uvd.inst->srbm_soft_reset = srbm_soft_reset; in uvd_v6_0_check_soft_reset()
1160 adev->uvd.inst->srbm_soft_reset = 0; in uvd_v6_0_check_soft_reset()
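Lines 1157 and 1160 cache the computed reset mask on the instance so the pre/soft/post stages below agree on whether a reset is pending. A sketch of the shape; the exact busy-bit tests are paraphrased, not quoted:

    static bool uvd_v6_0_check_soft_reset(void *handle)
    {
            struct amdgpu_device *adev = (struct amdgpu_device *)handle;
            u32 srbm_soft_reset = 0;
            u32 tmp = RREG32(mmSRBM_STATUS);

            /* paraphrased: any "UVD busy / request pending" status bit
             * set means the block needs SOFT_RESET_UVD */
            if (REG_GET_FIELD(tmp, SRBM_STATUS, UVD_RQ_PENDING) ||
                REG_GET_FIELD(tmp, SRBM_STATUS, UVD_BUSY))
                    srbm_soft_reset = REG_SET_FIELD(srbm_soft_reset,
                                    SRBM_SOFT_RESET, SOFT_RESET_UVD, 1);

            if (srbm_soft_reset) {
                    adev->uvd.inst->srbm_soft_reset = srbm_soft_reset;
                    return true;
            }
            adev->uvd.inst->srbm_soft_reset = 0;
            return false;
    }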
1169 if (!adev->uvd.inst->srbm_soft_reset) in uvd_v6_0_pre_soft_reset()
1181 if (!adev->uvd.inst->srbm_soft_reset) in uvd_v6_0_soft_reset()
1183 srbm_soft_reset = adev->uvd.inst->srbm_soft_reset; in uvd_v6_0_soft_reset()
1211 if (!adev->uvd.inst->srbm_soft_reset) in uvd_v6_0_post_soft_reset()
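Lines 1169, 1181 and 1211 all gate on that cached mask: each stage returns early when no UVD reset was requested. The middle stage is the standard amdgpu SRBM toggle; a sketch:

    static int uvd_v6_0_soft_reset(void *handle)
    {
            struct amdgpu_device *adev = (struct amdgpu_device *)handle;
            u32 srbm_soft_reset, tmp;

            if (!adev->uvd.inst->srbm_soft_reset)
                    return 0;
            srbm_soft_reset = adev->uvd.inst->srbm_soft_reset;

            /* assert the reset bits, let them latch, then release */
            tmp = RREG32(mmSRBM_SOFT_RESET);
            tmp |= srbm_soft_reset;
            WREG32(mmSRBM_SOFT_RESET, tmp);
            tmp = RREG32(mmSRBM_SOFT_RESET);

            udelay(50);

            tmp &= ~srbm_soft_reset;
            WREG32(mmSRBM_SOFT_RESET, tmp);
            tmp = RREG32(mmSRBM_SOFT_RESET);

            /* wait a little for things to settle down */
            udelay(50);

            return 0;
    }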
1237 amdgpu_fence_process(&adev->uvd.inst->ring); in uvd_v6_0_process_interrupt()
1241 amdgpu_fence_process(&adev->uvd.inst->ring_enc[0]); in uvd_v6_0_process_interrupt()
1247 amdgpu_fence_process(&adev->uvd.inst->ring_enc[1]); in uvd_v6_0_process_interrupt()
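Lines 1237 to 1247 dispatch fences by interrupt source: the UVD system-message source goes to the decode ring, the two encode general-purpose sources to ring_enc[0] and ring_enc[1]. A sketch of the dispatch; the literal src_id values are the VI-era numbers and are an assumption here:

    static int uvd_v6_0_process_interrupt(struct amdgpu_device *adev,
                                          struct amdgpu_irq_src *source,
                                          struct amdgpu_iv_entry *entry)
    {
            switch (entry->src_id) {
            case 124:       /* system message: decode ring (assumed id) */
                    amdgpu_fence_process(&adev->uvd.inst->ring);
                    break;
            case 119:       /* enc general purpose: first enc ring */
                    amdgpu_fence_process(&adev->uvd.inst->ring_enc[0]);
                    break;
            case 120:       /* enc general purpose: second enc ring */
                    amdgpu_fence_process(&adev->uvd.inst->ring_enc[1]);
                    break;
            default:
                    DRM_ERROR("Unhandled interrupt: %d %d\n",
                              entry->src_id, entry->src_data[0]);
                    break;
            }

            return 0;
    }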
1609 adev->uvd.inst->ring.funcs = &uvd_v6_0_ring_vm_funcs; in uvd_v6_0_set_ring_funcs()
1612 adev->uvd.inst->ring.funcs = &uvd_v6_0_ring_phys_funcs; in uvd_v6_0_set_ring_funcs()
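Lines 1609 and 1612 pick the decode ring's vtable: VM-mode funcs on ASICs with UVD virtual addressing, physical-mode funcs on older parts. A sketch, assuming the Polaris cut-off used elsewhere in this file:

    static void uvd_v6_0_set_ring_funcs(struct amdgpu_device *adev)
    {
            if (adev->asic_type >= CHIP_POLARIS10) {
                    adev->uvd.inst->ring.funcs = &uvd_v6_0_ring_vm_funcs;
                    DRM_INFO("UVD is enabled in VM mode\n");
            } else {
                    adev->uvd.inst->ring.funcs = &uvd_v6_0_ring_phys_funcs;
                    DRM_INFO("UVD is enabled in physical mode\n");
            }
    }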
1621 for (i = 0; i < adev->uvd.num_enc_rings; ++i) in uvd_v6_0_set_enc_ring_funcs()
1622 adev->uvd.inst->ring_enc[i].funcs = &uvd_v6_0_enc_ring_vm_funcs; in uvd_v6_0_set_enc_ring_funcs()
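Lines 1621 and 1622 are the whole encode counterpart; every encode ring shares one vtable. Reassembled:

    static void uvd_v6_0_set_enc_ring_funcs(struct amdgpu_device *adev)
    {
            int i;

            for (i = 0; i < adev->uvd.num_enc_rings; ++i)
                    adev->uvd.inst->ring_enc[i].funcs =
                            &uvd_v6_0_enc_ring_vm_funcs;
    }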
1635 adev->uvd.inst->irq.num_types = adev->uvd.num_enc_rings + 1; in uvd_v6_0_set_irq_funcs()
1637 adev->uvd.inst->irq.num_types = 1; in uvd_v6_0_set_irq_funcs()
1639 adev->uvd.inst->irq.funcs = &uvd_v6_0_irq_funcs; in uvd_v6_0_set_irq_funcs()
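Lines 1635 to 1639 size the interrupt source to match the topology: one type for decode plus one per encode ring when encode is supported, otherwise just the decode type. Reassembled from the hits above:

    static void uvd_v6_0_set_irq_funcs(struct amdgpu_device *adev)
    {
            if (uvd_v6_0_enc_support(adev))
                    adev->uvd.inst->irq.num_types =
                            adev->uvd.num_enc_rings + 1;
            else
                    adev->uvd.inst->irq.num_types = 1;

            adev->uvd.inst->irq.funcs = &uvd_v6_0_irq_funcs;
    }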