Lines Matching refs:adev
42 int amdgpu_gfx_mec_queue_to_bit(struct amdgpu_device *adev, int mec, in amdgpu_gfx_mec_queue_to_bit() argument
47 bit += mec * adev->gfx.mec.num_pipe_per_mec in amdgpu_gfx_mec_queue_to_bit()
48 * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_mec_queue_to_bit()
49 bit += pipe * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_mec_queue_to_bit()
55 void amdgpu_queue_mask_bit_to_mec_queue(struct amdgpu_device *adev, int bit, in amdgpu_queue_mask_bit_to_mec_queue() argument
58 *queue = bit % adev->gfx.mec.num_queue_per_pipe; in amdgpu_queue_mask_bit_to_mec_queue()
59 *pipe = (bit / adev->gfx.mec.num_queue_per_pipe) in amdgpu_queue_mask_bit_to_mec_queue()
60 % adev->gfx.mec.num_pipe_per_mec; in amdgpu_queue_mask_bit_to_mec_queue()
61 *mec = (bit / adev->gfx.mec.num_queue_per_pipe) in amdgpu_queue_mask_bit_to_mec_queue()
62 / adev->gfx.mec.num_pipe_per_mec; in amdgpu_queue_mask_bit_to_mec_queue()
66 bool amdgpu_gfx_is_mec_queue_enabled(struct amdgpu_device *adev, in amdgpu_gfx_is_mec_queue_enabled() argument
69 return test_bit(amdgpu_gfx_mec_queue_to_bit(adev, mec, pipe, queue), in amdgpu_gfx_is_mec_queue_enabled()
70 adev->gfx.mec_bitmap[xcc_id].queue_bitmap); in amdgpu_gfx_is_mec_queue_enabled()
73 int amdgpu_gfx_me_queue_to_bit(struct amdgpu_device *adev, in amdgpu_gfx_me_queue_to_bit() argument
78 bit += me * adev->gfx.me.num_pipe_per_me in amdgpu_gfx_me_queue_to_bit()
79 * adev->gfx.me.num_queue_per_pipe; in amdgpu_gfx_me_queue_to_bit()
80 bit += pipe * adev->gfx.me.num_queue_per_pipe; in amdgpu_gfx_me_queue_to_bit()
86 void amdgpu_gfx_bit_to_me_queue(struct amdgpu_device *adev, int bit, in amdgpu_gfx_bit_to_me_queue() argument
89 *queue = bit % adev->gfx.me.num_queue_per_pipe; in amdgpu_gfx_bit_to_me_queue()
90 *pipe = (bit / adev->gfx.me.num_queue_per_pipe) in amdgpu_gfx_bit_to_me_queue()
91 % adev->gfx.me.num_pipe_per_me; in amdgpu_gfx_bit_to_me_queue()
92 *me = (bit / adev->gfx.me.num_queue_per_pipe) in amdgpu_gfx_bit_to_me_queue()
93 / adev->gfx.me.num_pipe_per_me; in amdgpu_gfx_bit_to_me_queue()
96 bool amdgpu_gfx_is_me_queue_enabled(struct amdgpu_device *adev, in amdgpu_gfx_is_me_queue_enabled() argument
99 return test_bit(amdgpu_gfx_me_queue_to_bit(adev, me, pipe, queue), in amdgpu_gfx_is_me_queue_enabled()
100 adev->gfx.me.queue_bitmap); in amdgpu_gfx_is_me_queue_enabled()
148 static bool amdgpu_gfx_is_graphics_multipipe_capable(struct amdgpu_device *adev) in amdgpu_gfx_is_graphics_multipipe_capable() argument
150 return amdgpu_async_gfx_ring && adev->gfx.me.num_pipe_per_me > 1; in amdgpu_gfx_is_graphics_multipipe_capable()
153 static bool amdgpu_gfx_is_compute_multipipe_capable(struct amdgpu_device *adev) in amdgpu_gfx_is_compute_multipipe_capable() argument
161 if (adev->ip_versions[GC_HWIP][0] > IP_VERSION(9, 0, 0)) in amdgpu_gfx_is_compute_multipipe_capable()
166 if (adev->asic_type == CHIP_POLARIS11) in amdgpu_gfx_is_compute_multipipe_capable()
169 return adev->gfx.mec.num_mec > 1; in amdgpu_gfx_is_compute_multipipe_capable()
172 bool amdgpu_gfx_is_high_priority_graphics_queue(struct amdgpu_device *adev, in amdgpu_gfx_is_high_priority_graphics_queue() argument
181 if (amdgpu_gfx_is_graphics_multipipe_capable(adev) && in amdgpu_gfx_is_high_priority_graphics_queue()
182 adev->gfx.num_gfx_rings > 1 && pipe == 1 && queue == 0) { in amdgpu_gfx_is_high_priority_graphics_queue()
186 bit = amdgpu_gfx_me_queue_to_bit(adev, me, pipe, queue); in amdgpu_gfx_is_high_priority_graphics_queue()
187 if (ring == &adev->gfx.gfx_ring[bit]) in amdgpu_gfx_is_high_priority_graphics_queue()
194 bool amdgpu_gfx_is_high_priority_compute_queue(struct amdgpu_device *adev, in amdgpu_gfx_is_high_priority_compute_queue() argument
200 if (adev->gfx.num_compute_rings > 1 && in amdgpu_gfx_is_high_priority_compute_queue()
201 ring == &adev->gfx.compute_ring[0]) in amdgpu_gfx_is_high_priority_compute_queue()
207 void amdgpu_gfx_compute_queue_acquire(struct amdgpu_device *adev) in amdgpu_gfx_compute_queue_acquire() argument
210 bool multipipe_policy = amdgpu_gfx_is_compute_multipipe_capable(adev); in amdgpu_gfx_compute_queue_acquire()
211 int max_queues_per_mec = min(adev->gfx.mec.num_pipe_per_mec * in amdgpu_gfx_compute_queue_acquire()
212 adev->gfx.mec.num_queue_per_pipe, in amdgpu_gfx_compute_queue_acquire()
213 adev->gfx.num_compute_rings); in amdgpu_gfx_compute_queue_acquire()
214 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_gfx_compute_queue_acquire()
221 pipe = i % adev->gfx.mec.num_pipe_per_mec; in amdgpu_gfx_compute_queue_acquire()
222 queue = (i / adev->gfx.mec.num_pipe_per_mec) % in amdgpu_gfx_compute_queue_acquire()
223 adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_compute_queue_acquire()
225 set_bit(pipe * adev->gfx.mec.num_queue_per_pipe + queue, in amdgpu_gfx_compute_queue_acquire()
226 adev->gfx.mec_bitmap[j].queue_bitmap); in amdgpu_gfx_compute_queue_acquire()
233 set_bit(i, adev->gfx.mec_bitmap[j].queue_bitmap); in amdgpu_gfx_compute_queue_acquire()
238 dev_dbg(adev->dev, "mec queue bitmap weight=%d\n", in amdgpu_gfx_compute_queue_acquire()
239 bitmap_weight(adev->gfx.mec_bitmap[j].queue_bitmap, AMDGPU_MAX_COMPUTE_QUEUES)); in amdgpu_gfx_compute_queue_acquire()
243 void amdgpu_gfx_graphics_queue_acquire(struct amdgpu_device *adev) in amdgpu_gfx_graphics_queue_acquire() argument
246 bool multipipe_policy = amdgpu_gfx_is_graphics_multipipe_capable(adev); in amdgpu_gfx_graphics_queue_acquire()
247 int max_queues_per_me = adev->gfx.me.num_pipe_per_me * in amdgpu_gfx_graphics_queue_acquire()
248 adev->gfx.me.num_queue_per_pipe; in amdgpu_gfx_graphics_queue_acquire()
254 pipe = i % adev->gfx.me.num_pipe_per_me; in amdgpu_gfx_graphics_queue_acquire()
255 queue = (i / adev->gfx.me.num_pipe_per_me) % in amdgpu_gfx_graphics_queue_acquire()
256 adev->gfx.me.num_queue_per_pipe; in amdgpu_gfx_graphics_queue_acquire()
258 set_bit(pipe * adev->gfx.me.num_queue_per_pipe + queue, in amdgpu_gfx_graphics_queue_acquire()
259 adev->gfx.me.queue_bitmap); in amdgpu_gfx_graphics_queue_acquire()
263 set_bit(i, adev->gfx.me.queue_bitmap); in amdgpu_gfx_graphics_queue_acquire()
267 adev->gfx.num_gfx_rings = in amdgpu_gfx_graphics_queue_acquire()
268 bitmap_weight(adev->gfx.me.queue_bitmap, AMDGPU_MAX_GFX_QUEUES); in amdgpu_gfx_graphics_queue_acquire()
271 static int amdgpu_gfx_kiq_acquire(struct amdgpu_device *adev, in amdgpu_gfx_kiq_acquire() argument
277 queue_bit = adev->gfx.mec.num_mec in amdgpu_gfx_kiq_acquire()
278 * adev->gfx.mec.num_pipe_per_mec in amdgpu_gfx_kiq_acquire()
279 * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_kiq_acquire()
282 if (test_bit(queue_bit, adev->gfx.mec_bitmap[xcc_id].queue_bitmap)) in amdgpu_gfx_kiq_acquire()
285 amdgpu_queue_mask_bit_to_mec_queue(adev, queue_bit, &mec, &pipe, &queue); in amdgpu_gfx_kiq_acquire()
302 dev_err(adev->dev, "Failed to find a queue for KIQ\n"); in amdgpu_gfx_kiq_acquire()
306 int amdgpu_gfx_kiq_init_ring(struct amdgpu_device *adev, in amdgpu_gfx_kiq_init_ring() argument
310 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_kiq_init_ring()
315 ring->adev = NULL; in amdgpu_gfx_kiq_init_ring()
321 (adev->doorbell_index.kiq + in amdgpu_gfx_kiq_init_ring()
322 xcc_id * adev->doorbell_index.xcc_doorbell_range) in amdgpu_gfx_kiq_init_ring()
325 r = amdgpu_gfx_kiq_acquire(adev, ring, xcc_id); in amdgpu_gfx_kiq_init_ring()
332 r = amdgpu_ring_init(adev, ring, 1024, irq, AMDGPU_CP_KIQ_IRQ_DRIVER0, in amdgpu_gfx_kiq_init_ring()
335 dev_warn(adev->dev, "(%d) failed to init kiq ring\n", r); in amdgpu_gfx_kiq_init_ring()
345 void amdgpu_gfx_kiq_fini(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_kiq_fini() argument
347 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_kiq_fini()
352 int amdgpu_gfx_kiq_init(struct amdgpu_device *adev, in amdgpu_gfx_kiq_init() argument
357 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_kiq_init()
359 r = amdgpu_bo_create_kernel(adev, hpd_size, PAGE_SIZE, in amdgpu_gfx_kiq_init()
363 dev_warn(adev->dev, "failed to create KIQ bo (%d).\n", r); in amdgpu_gfx_kiq_init()
371 dev_warn(adev->dev, "(%d) reserve kiq eop bo failed\n", r); in amdgpu_gfx_kiq_init()
379 int amdgpu_gfx_mqd_sw_init(struct amdgpu_device *adev, in amdgpu_gfx_mqd_sw_init() argument
383 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_mqd_sw_init()
388 if (adev->ip_versions[GC_HWIP][0] >= IP_VERSION(10, 0, 0)) in amdgpu_gfx_mqd_sw_init()
392 if (!adev->enable_mes_kiq && !ring->mqd_obj) { in amdgpu_gfx_mqd_sw_init()
398 r = amdgpu_bo_create_kernel(adev, mqd_size, PAGE_SIZE, in amdgpu_gfx_mqd_sw_init()
405 dev_warn(adev->dev, "failed to create ring mqd ob (%d)", r); in amdgpu_gfx_mqd_sw_init()
412 dev_warn(adev->dev, in amdgpu_gfx_mqd_sw_init()
418 if (adev->asic_type >= CHIP_NAVI10 && amdgpu_async_gfx_ring) { in amdgpu_gfx_mqd_sw_init()
420 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in amdgpu_gfx_mqd_sw_init()
421 ring = &adev->gfx.gfx_ring[i]; in amdgpu_gfx_mqd_sw_init()
423 r = amdgpu_bo_create_kernel(adev, mqd_size, PAGE_SIZE, in amdgpu_gfx_mqd_sw_init()
427 dev_warn(adev->dev, "failed to create ring mqd bo (%d)", r); in amdgpu_gfx_mqd_sw_init()
433 adev->gfx.me.mqd_backup[i] = kmalloc(mqd_size, GFP_KERNEL); in amdgpu_gfx_mqd_sw_init()
434 if (!adev->gfx.me.mqd_backup[i]) { in amdgpu_gfx_mqd_sw_init()
435 dev_warn(adev->dev, "no memory to create MQD backup for ring %s\n", ring->name); in amdgpu_gfx_mqd_sw_init()
443 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_mqd_sw_init()
444 j = i + xcc_id * adev->gfx.num_compute_rings; in amdgpu_gfx_mqd_sw_init()
445 ring = &adev->gfx.compute_ring[j]; in amdgpu_gfx_mqd_sw_init()
447 r = amdgpu_bo_create_kernel(adev, mqd_size, PAGE_SIZE, in amdgpu_gfx_mqd_sw_init()
451 dev_warn(adev->dev, "failed to create ring mqd bo (%d)", r); in amdgpu_gfx_mqd_sw_init()
457 adev->gfx.mec.mqd_backup[j] = kmalloc(mqd_size, GFP_KERNEL); in amdgpu_gfx_mqd_sw_init()
458 if (!adev->gfx.mec.mqd_backup[j]) { in amdgpu_gfx_mqd_sw_init()
459 dev_warn(adev->dev, "no memory to create MQD backup for ring %s\n", ring->name); in amdgpu_gfx_mqd_sw_init()
468 void amdgpu_gfx_mqd_sw_fini(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_mqd_sw_fini() argument
472 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_mqd_sw_fini()
474 if (adev->asic_type >= CHIP_NAVI10 && amdgpu_async_gfx_ring) { in amdgpu_gfx_mqd_sw_fini()
475 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in amdgpu_gfx_mqd_sw_fini()
476 ring = &adev->gfx.gfx_ring[i]; in amdgpu_gfx_mqd_sw_fini()
477 kfree(adev->gfx.me.mqd_backup[i]); in amdgpu_gfx_mqd_sw_fini()
484 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_mqd_sw_fini()
485 j = i + xcc_id * adev->gfx.num_compute_rings; in amdgpu_gfx_mqd_sw_fini()
486 ring = &adev->gfx.compute_ring[j]; in amdgpu_gfx_mqd_sw_fini()
487 kfree(adev->gfx.mec.mqd_backup[j]); in amdgpu_gfx_mqd_sw_fini()
500 int amdgpu_gfx_disable_kcq(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_disable_kcq() argument
502 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_disable_kcq()
512 adev->gfx.num_compute_rings)) { in amdgpu_gfx_disable_kcq()
517 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_disable_kcq()
518 j = i + xcc_id * adev->gfx.num_compute_rings; in amdgpu_gfx_disable_kcq()
520 &adev->gfx.compute_ring[j], in amdgpu_gfx_disable_kcq()
524 if (kiq_ring->sched.ready && !adev->job_hang) in amdgpu_gfx_disable_kcq()
531 int amdgpu_gfx_disable_kgq(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_disable_kgq() argument
533 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_disable_kgq()
542 if (amdgpu_gfx_is_master_xcc(adev, xcc_id)) { in amdgpu_gfx_disable_kgq()
544 adev->gfx.num_gfx_rings)) { in amdgpu_gfx_disable_kgq()
549 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in amdgpu_gfx_disable_kgq()
550 j = i + xcc_id * adev->gfx.num_gfx_rings; in amdgpu_gfx_disable_kgq()
552 &adev->gfx.gfx_ring[j], in amdgpu_gfx_disable_kgq()
557 if (adev->gfx.kiq[0].ring.sched.ready && !adev->job_hang) in amdgpu_gfx_disable_kgq()
564 int amdgpu_queue_mask_bit_to_set_resource_bit(struct amdgpu_device *adev, in amdgpu_queue_mask_bit_to_set_resource_bit() argument
570 amdgpu_queue_mask_bit_to_mec_queue(adev, queue_bit, &mec, &pipe, &queue); in amdgpu_queue_mask_bit_to_set_resource_bit()
577 int amdgpu_gfx_enable_kcq(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_enable_kcq() argument
579 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_enable_kcq()
588 if (!test_bit(i, adev->gfx.mec_bitmap[xcc_id].queue_bitmap)) in amdgpu_gfx_enable_kcq()
599 queue_mask |= (1ull << amdgpu_queue_mask_bit_to_set_resource_bit(adev, i)); in amdgpu_gfx_enable_kcq()
604 amdgpu_device_flush_hdp(adev, NULL); in amdgpu_gfx_enable_kcq()
608 adev->gfx.num_compute_rings + in amdgpu_gfx_enable_kcq()
616 if (adev->enable_mes) in amdgpu_gfx_enable_kcq()
620 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_enable_kcq()
621 j = i + xcc_id * adev->gfx.num_compute_rings; in amdgpu_gfx_enable_kcq()
623 &adev->gfx.compute_ring[j]); in amdgpu_gfx_enable_kcq()
634 int amdgpu_gfx_enable_kgq(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_enable_kgq() argument
636 struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id]; in amdgpu_gfx_enable_kgq()
643 amdgpu_device_flush_hdp(adev, NULL); in amdgpu_gfx_enable_kgq()
647 if (amdgpu_gfx_is_master_xcc(adev, xcc_id)) { in amdgpu_gfx_enable_kgq()
649 adev->gfx.num_gfx_rings); in amdgpu_gfx_enable_kgq()
656 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in amdgpu_gfx_enable_kgq()
657 j = i + xcc_id * adev->gfx.num_gfx_rings; in amdgpu_gfx_enable_kgq()
659 &adev->gfx.gfx_ring[j]); in amdgpu_gfx_enable_kgq()
682 void amdgpu_gfx_off_ctrl(struct amdgpu_device *adev, bool enable) in amdgpu_gfx_off_ctrl() argument
686 if (!(adev->pm.pp_feature & PP_GFXOFF_MASK)) in amdgpu_gfx_off_ctrl()
689 mutex_lock(&adev->gfx.gfx_off_mutex); in amdgpu_gfx_off_ctrl()
696 if (WARN_ON_ONCE(adev->gfx.gfx_off_req_count == 0)) in amdgpu_gfx_off_ctrl()
699 adev->gfx.gfx_off_req_count--; in amdgpu_gfx_off_ctrl()
701 if (adev->gfx.gfx_off_req_count == 0 && in amdgpu_gfx_off_ctrl()
702 !adev->gfx.gfx_off_state) { in amdgpu_gfx_off_ctrl()
703 schedule_delayed_work(&adev->gfx.gfx_off_delay_work, in amdgpu_gfx_off_ctrl()
707 if (adev->gfx.gfx_off_req_count == 0) { in amdgpu_gfx_off_ctrl()
708 cancel_delayed_work_sync(&adev->gfx.gfx_off_delay_work); in amdgpu_gfx_off_ctrl()
710 if (adev->gfx.gfx_off_state && in amdgpu_gfx_off_ctrl()
711 !amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_GFX, false)) { in amdgpu_gfx_off_ctrl()
712 adev->gfx.gfx_off_state = false; in amdgpu_gfx_off_ctrl()
714 if (adev->gfx.funcs->init_spm_golden) { in amdgpu_gfx_off_ctrl()
715 dev_dbg(adev->dev, in amdgpu_gfx_off_ctrl()
717 amdgpu_gfx_init_spm_golden(adev); in amdgpu_gfx_off_ctrl()
722 adev->gfx.gfx_off_req_count++; in amdgpu_gfx_off_ctrl()
726 mutex_unlock(&adev->gfx.gfx_off_mutex); in amdgpu_gfx_off_ctrl()
729 int amdgpu_set_gfx_off_residency(struct amdgpu_device *adev, bool value) in amdgpu_set_gfx_off_residency() argument
733 mutex_lock(&adev->gfx.gfx_off_mutex); in amdgpu_set_gfx_off_residency()
735 r = amdgpu_dpm_set_residency_gfxoff(adev, value); in amdgpu_set_gfx_off_residency()
737 mutex_unlock(&adev->gfx.gfx_off_mutex); in amdgpu_set_gfx_off_residency()
742 int amdgpu_get_gfx_off_residency(struct amdgpu_device *adev, u32 *value) in amdgpu_get_gfx_off_residency() argument
746 mutex_lock(&adev->gfx.gfx_off_mutex); in amdgpu_get_gfx_off_residency()
748 r = amdgpu_dpm_get_residency_gfxoff(adev, value); in amdgpu_get_gfx_off_residency()
750 mutex_unlock(&adev->gfx.gfx_off_mutex); in amdgpu_get_gfx_off_residency()
755 int amdgpu_get_gfx_off_entrycount(struct amdgpu_device *adev, u64 *value) in amdgpu_get_gfx_off_entrycount() argument
759 mutex_lock(&adev->gfx.gfx_off_mutex); in amdgpu_get_gfx_off_entrycount()
761 r = amdgpu_dpm_get_entrycount_gfxoff(adev, value); in amdgpu_get_gfx_off_entrycount()
763 mutex_unlock(&adev->gfx.gfx_off_mutex); in amdgpu_get_gfx_off_entrycount()
768 int amdgpu_get_gfx_off_status(struct amdgpu_device *adev, uint32_t *value) in amdgpu_get_gfx_off_status() argument
773 mutex_lock(&adev->gfx.gfx_off_mutex); in amdgpu_get_gfx_off_status()
775 r = amdgpu_dpm_get_status_gfxoff(adev, value); in amdgpu_get_gfx_off_status()
777 mutex_unlock(&adev->gfx.gfx_off_mutex); in amdgpu_get_gfx_off_status()
782 int amdgpu_gfx_ras_late_init(struct amdgpu_device *adev, struct ras_common_if *ras_block) in amdgpu_gfx_ras_late_init() argument
786 if (amdgpu_ras_is_supported(adev, ras_block->block)) { in amdgpu_gfx_ras_late_init()
787 if (!amdgpu_persistent_edc_harvesting_supported(adev)) in amdgpu_gfx_ras_late_init()
788 amdgpu_ras_reset_error_status(adev, AMDGPU_RAS_BLOCK__GFX); in amdgpu_gfx_ras_late_init()
790 r = amdgpu_ras_block_late_init(adev, ras_block); in amdgpu_gfx_ras_late_init()
794 if (adev->gfx.cp_ecc_error_irq.funcs) { in amdgpu_gfx_ras_late_init()
795 r = amdgpu_irq_get(adev, &adev->gfx.cp_ecc_error_irq, 0); in amdgpu_gfx_ras_late_init()
800 amdgpu_ras_feature_enable_on_boot(adev, ras_block, 0); in amdgpu_gfx_ras_late_init()
805 amdgpu_ras_block_late_fini(adev, ras_block); in amdgpu_gfx_ras_late_init()
809 int amdgpu_gfx_ras_sw_init(struct amdgpu_device *adev) in amdgpu_gfx_ras_sw_init() argument
817 if (!adev->gfx.ras) in amdgpu_gfx_ras_sw_init()
820 ras = adev->gfx.ras; in amdgpu_gfx_ras_sw_init()
822 err = amdgpu_ras_register_ras_block(adev, &ras->ras_block); in amdgpu_gfx_ras_sw_init()
824 dev_err(adev->dev, "Failed to register gfx ras block!\n"); in amdgpu_gfx_ras_sw_init()
831 adev->gfx.ras_if = &ras->ras_block.ras_comm; in amdgpu_gfx_ras_sw_init()
844 int amdgpu_gfx_poison_consumption_handler(struct amdgpu_device *adev, in amdgpu_gfx_poison_consumption_handler() argument
847 if (adev->gfx.ras && adev->gfx.ras->poison_consumption_handler) in amdgpu_gfx_poison_consumption_handler()
848 return adev->gfx.ras->poison_consumption_handler(adev, entry); in amdgpu_gfx_poison_consumption_handler()
853 int amdgpu_gfx_process_ras_data_cb(struct amdgpu_device *adev, in amdgpu_gfx_process_ras_data_cb() argument
863 if (!amdgpu_ras_is_supported(adev, AMDGPU_RAS_BLOCK__GFX)) { in amdgpu_gfx_process_ras_data_cb()
864 kgd2kfd_set_sram_ecc_flag(adev->kfd.dev); in amdgpu_gfx_process_ras_data_cb()
865 if (adev->gfx.ras && adev->gfx.ras->ras_block.hw_ops && in amdgpu_gfx_process_ras_data_cb()
866 adev->gfx.ras->ras_block.hw_ops->query_ras_error_count) in amdgpu_gfx_process_ras_data_cb()
867 adev->gfx.ras->ras_block.hw_ops->query_ras_error_count(adev, err_data); in amdgpu_gfx_process_ras_data_cb()
868 amdgpu_ras_reset_gpu(adev); in amdgpu_gfx_process_ras_data_cb()
873 int amdgpu_gfx_cp_ecc_error_irq(struct amdgpu_device *adev, in amdgpu_gfx_cp_ecc_error_irq() argument
877 struct ras_common_if *ras_if = adev->gfx.ras_if; in amdgpu_gfx_cp_ecc_error_irq()
888 amdgpu_ras_interrupt_dispatch(adev, &ih_data); in amdgpu_gfx_cp_ecc_error_irq()
892 void amdgpu_gfx_ras_error_func(struct amdgpu_device *adev, in amdgpu_gfx_ras_error_func() argument
894 void (*func)(struct amdgpu_device *adev, void *ras_error_status, in amdgpu_gfx_ras_error_func() argument
898 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_gfx_ras_error_func()
908 func(adev, ras_error_status, i); in amdgpu_gfx_ras_error_func()
911 uint32_t amdgpu_kiq_rreg(struct amdgpu_device *adev, uint32_t reg) in amdgpu_kiq_rreg() argument
916 struct amdgpu_kiq *kiq = &adev->gfx.kiq[0]; in amdgpu_kiq_rreg()
919 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_kiq_rreg()
922 if (adev->mes.ring.sched.ready) in amdgpu_kiq_rreg()
923 return amdgpu_mes_rreg(adev, reg); in amdgpu_kiq_rreg()
928 if (amdgpu_device_wb_get(adev, &reg_val_offs)) { in amdgpu_kiq_rreg()
951 if (r < 1 && (amdgpu_in_reset(adev) || in_interrupt())) in amdgpu_kiq_rreg()
964 value = adev->wb.wb[reg_val_offs]; in amdgpu_kiq_rreg()
965 amdgpu_device_wb_free(adev, reg_val_offs); in amdgpu_kiq_rreg()
974 amdgpu_device_wb_free(adev, reg_val_offs); in amdgpu_kiq_rreg()
975 dev_err(adev->dev, "failed to read reg:%x\n", reg); in amdgpu_kiq_rreg()
979 void amdgpu_kiq_wreg(struct amdgpu_device *adev, uint32_t reg, uint32_t v) in amdgpu_kiq_wreg() argument
984 struct amdgpu_kiq *kiq = &adev->gfx.kiq[0]; in amdgpu_kiq_wreg()
989 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_kiq_wreg()
992 if (adev->mes.ring.sched.ready) { in amdgpu_kiq_wreg()
993 amdgpu_mes_wreg(adev, reg, v); in amdgpu_kiq_wreg()
1017 if (r < 1 && (amdgpu_in_reset(adev) || in_interrupt())) in amdgpu_kiq_wreg()
1036 dev_err(adev->dev, "failed to write reg:%x\n", reg); in amdgpu_kiq_wreg()
1039 int amdgpu_gfx_get_num_kcq(struct amdgpu_device *adev) in amdgpu_gfx_get_num_kcq() argument
1044 dev_warn(adev->dev, "set kernel compute queue number to 8 due to invalid parameter provided by user\n"); in amdgpu_gfx_get_num_kcq()
1050 void amdgpu_gfx_cp_init_microcode(struct amdgpu_device *adev, in amdgpu_gfx_cp_init_microcode() argument
1062 adev->gfx.pfp_fw->data; in amdgpu_gfx_cp_init_microcode()
1063 adev->gfx.pfp_fw_version = in amdgpu_gfx_cp_init_microcode()
1065 adev->gfx.pfp_feature_version = in amdgpu_gfx_cp_init_microcode()
1067 ucode_fw = adev->gfx.pfp_fw; in amdgpu_gfx_cp_init_microcode()
1072 adev->gfx.pfp_fw->data; in amdgpu_gfx_cp_init_microcode()
1073 adev->gfx.pfp_fw_version = in amdgpu_gfx_cp_init_microcode()
1075 adev->gfx.pfp_feature_version = in amdgpu_gfx_cp_init_microcode()
1077 ucode_fw = adev->gfx.pfp_fw; in amdgpu_gfx_cp_init_microcode()
1083 adev->gfx.pfp_fw->data; in amdgpu_gfx_cp_init_microcode()
1084 ucode_fw = adev->gfx.pfp_fw; in amdgpu_gfx_cp_init_microcode()
1089 adev->gfx.me_fw->data; in amdgpu_gfx_cp_init_microcode()
1090 adev->gfx.me_fw_version = in amdgpu_gfx_cp_init_microcode()
1092 adev->gfx.me_feature_version = in amdgpu_gfx_cp_init_microcode()
1094 ucode_fw = adev->gfx.me_fw; in amdgpu_gfx_cp_init_microcode()
1099 adev->gfx.me_fw->data; in amdgpu_gfx_cp_init_microcode()
1100 adev->gfx.me_fw_version = in amdgpu_gfx_cp_init_microcode()
1102 adev->gfx.me_feature_version = in amdgpu_gfx_cp_init_microcode()
1104 ucode_fw = adev->gfx.me_fw; in amdgpu_gfx_cp_init_microcode()
1110 adev->gfx.me_fw->data; in amdgpu_gfx_cp_init_microcode()
1111 ucode_fw = adev->gfx.me_fw; in amdgpu_gfx_cp_init_microcode()
1116 adev->gfx.ce_fw->data; in amdgpu_gfx_cp_init_microcode()
1117 adev->gfx.ce_fw_version = in amdgpu_gfx_cp_init_microcode()
1119 adev->gfx.ce_feature_version = in amdgpu_gfx_cp_init_microcode()
1121 ucode_fw = adev->gfx.ce_fw; in amdgpu_gfx_cp_init_microcode()
1126 adev->gfx.mec_fw->data; in amdgpu_gfx_cp_init_microcode()
1127 adev->gfx.mec_fw_version = in amdgpu_gfx_cp_init_microcode()
1129 adev->gfx.mec_feature_version = in amdgpu_gfx_cp_init_microcode()
1131 ucode_fw = adev->gfx.mec_fw; in amdgpu_gfx_cp_init_microcode()
1137 adev->gfx.mec_fw->data; in amdgpu_gfx_cp_init_microcode()
1138 ucode_fw = adev->gfx.mec_fw; in amdgpu_gfx_cp_init_microcode()
1143 adev->gfx.mec2_fw->data; in amdgpu_gfx_cp_init_microcode()
1144 adev->gfx.mec2_fw_version = in amdgpu_gfx_cp_init_microcode()
1146 adev->gfx.mec2_feature_version = in amdgpu_gfx_cp_init_microcode()
1148 ucode_fw = adev->gfx.mec2_fw; in amdgpu_gfx_cp_init_microcode()
1154 adev->gfx.mec2_fw->data; in amdgpu_gfx_cp_init_microcode()
1155 ucode_fw = adev->gfx.mec2_fw; in amdgpu_gfx_cp_init_microcode()
1160 adev->gfx.mec_fw->data; in amdgpu_gfx_cp_init_microcode()
1161 adev->gfx.mec_fw_version = in amdgpu_gfx_cp_init_microcode()
1163 adev->gfx.mec_feature_version = in amdgpu_gfx_cp_init_microcode()
1165 ucode_fw = adev->gfx.mec_fw; in amdgpu_gfx_cp_init_microcode()
1173 adev->gfx.mec_fw->data; in amdgpu_gfx_cp_init_microcode()
1174 ucode_fw = adev->gfx.mec_fw; in amdgpu_gfx_cp_init_microcode()
1181 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) { in amdgpu_gfx_cp_init_microcode()
1182 info = &adev->firmware.ucode[ucode_id]; in amdgpu_gfx_cp_init_microcode()
1185 adev->firmware.fw_size += ALIGN(fw_size, PAGE_SIZE); in amdgpu_gfx_cp_init_microcode()
1189 bool amdgpu_gfx_is_master_xcc(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_is_master_xcc() argument
1191 return !(xcc_id % (adev->gfx.num_xcc_per_xcp ? in amdgpu_gfx_is_master_xcc()
1192 adev->gfx.num_xcc_per_xcp : 1)); in amdgpu_gfx_is_master_xcc()
1200 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_gfx_get_current_compute_partition() local
1203 mode = amdgpu_xcp_query_partition_mode(adev->xcp_mgr, in amdgpu_gfx_get_current_compute_partition()
1214 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_gfx_set_compute_partition() local
1218 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in amdgpu_gfx_set_compute_partition()
1246 ret = amdgpu_xcp_switch_partition_mode(adev->xcp_mgr, mode); in amdgpu_gfx_set_compute_partition()
1259 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_gfx_get_available_compute_partition() local
1263 switch (NUM_XCC(adev->gfx.xcc_mask)) { in amdgpu_gfx_get_available_compute_partition()
1292 int amdgpu_gfx_sysfs_init(struct amdgpu_device *adev) in amdgpu_gfx_sysfs_init() argument
1296 r = device_create_file(adev->dev, &dev_attr_current_compute_partition); in amdgpu_gfx_sysfs_init()
1300 r = device_create_file(adev->dev, &dev_attr_available_compute_partition); in amdgpu_gfx_sysfs_init()
1305 void amdgpu_gfx_sysfs_fini(struct amdgpu_device *adev) in amdgpu_gfx_sysfs_fini() argument
1307 device_remove_file(adev->dev, &dev_attr_current_compute_partition); in amdgpu_gfx_sysfs_fini()
1308 device_remove_file(adev->dev, &dev_attr_available_compute_partition); in amdgpu_gfx_sysfs_fini()