Lines matching refs:adev
36 static void xgpu_nv_mailbox_send_ack(struct amdgpu_device *adev) in xgpu_nv_mailbox_send_ack() argument
41 static void xgpu_nv_mailbox_set_valid(struct amdgpu_device *adev, bool val) in xgpu_nv_mailbox_set_valid() argument
55 static enum idh_event xgpu_nv_mailbox_peek_msg(struct amdgpu_device *adev) in xgpu_nv_mailbox_peek_msg() argument
61 static int xgpu_nv_mailbox_rcv_msg(struct amdgpu_device *adev, in xgpu_nv_mailbox_rcv_msg() argument
70 xgpu_nv_mailbox_send_ack(adev); in xgpu_nv_mailbox_rcv_msg()
75 static uint8_t xgpu_nv_peek_ack(struct amdgpu_device *adev) in xgpu_nv_peek_ack() argument
80 static int xgpu_nv_poll_ack(struct amdgpu_device *adev) in xgpu_nv_poll_ack() argument
99 static int xgpu_nv_poll_msg(struct amdgpu_device *adev, enum idh_event event) in xgpu_nv_poll_msg() argument
108 r = xgpu_nv_mailbox_rcv_msg(adev, event); in xgpu_nv_poll_msg()
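The excerpts above show a layered set of receive primitives: xgpu_nv_mailbox_peek_msg() reads the mailbox without consuming it, xgpu_nv_mailbox_rcv_msg() checks for one expected event and acks it back to the host (line 70), and xgpu_nv_poll_ack()/xgpu_nv_poll_msg() busy-wait with a timeout budget. Below is a minimal standalone model of that poll-with-timeout pattern; read_ack_bit(), the 500 ms budget, and the 5 ms backoff are assumptions standing in for the driver's real MMIO read and timing constants.

#include <errno.h>
#include <stdbool.h>
#include <unistd.h>

static bool read_ack_bit(void)
{
	static int reads;
	return ++reads >= 3;		/* stub: host "acks" on the third read */
}

static int poll_ack_model(void)
{
	int timeout_ms = 500;		/* assumed budget */

	do {
		if (read_ack_bit())
			return 0;	/* acked within budget */
		usleep(5000);		/* back off 5 ms between reads */
		timeout_ms -= 5;
	} while (timeout_ms > 0);

	return -ETIME;			/* host never raised the ack bit */
}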
120 static void xgpu_nv_mailbox_trans_msg (struct amdgpu_device *adev, in xgpu_nv_mailbox_trans_msg() argument
133 xgpu_nv_mailbox_set_valid(adev, false); in xgpu_nv_mailbox_trans_msg()
134 trn = xgpu_nv_peek_ack(adev); in xgpu_nv_mailbox_trans_msg()
145 xgpu_nv_mailbox_set_valid(adev, true); in xgpu_nv_mailbox_trans_msg()
148 r = xgpu_nv_poll_ack(adev); in xgpu_nv_mailbox_trans_msg()
152 xgpu_nv_mailbox_set_valid(adev, false); in xgpu_nv_mailbox_trans_msg()
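Lines 133-152 outline the send side of the handshake: drop the VALID flag, give any stale ack from the host time to clear, write the payload, raise VALID, poll for the fresh ack, and drop VALID again. A hedged standalone model of that sequence follows; every reg_*/mbox_* name here is a hypothetical stand-in for the driver's mailbox register accessors.

#include <stdbool.h>
#include <stdio.h>

static bool mbox_valid, mbox_ack;	/* model registers, not real MMIO */
static int  mbox_payload[4];

static void reg_set_valid(bool v) { mbox_valid = v; }
static bool reg_peek_ack(void)    { return mbox_ack; }
static int  poll_ack_model(void)  { mbox_ack = true; return 0; }  /* stub */

static void trans_msg_model(int req, int d1, int d2, int d3)
{
	int retries = 100;

	reg_set_valid(false);		/* invalidate any previous message */

	/* Let a stale ack from an earlier message drain, but not forever. */
	while (reg_peek_ack() && --retries)
		;			/* the real code sleeps between reads */

	mbox_payload[0] = req;		/* message id plus three data words */
	mbox_payload[1] = d1;
	mbox_payload[2] = d2;
	mbox_payload[3] = d3;

	reg_set_valid(true);		/* tell the host the message is live */

	if (poll_ack_model())
		fprintf(stderr, "mailbox msg %d: ack timeout\n", req);

	reg_set_valid(false);		/* handshake done, drop VALID */
}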
155 static int xgpu_nv_send_access_requests(struct amdgpu_device *adev, in xgpu_nv_send_access_requests() argument
162 xgpu_nv_mailbox_trans_msg(adev, req, 0, 0, 0); in xgpu_nv_send_access_requests()
178 r = xgpu_nv_poll_msg(adev, event); in xgpu_nv_send_access_requests()
187 adev->virt.req_init_data_ver = 0; in xgpu_nv_send_access_requests()
190 adev->virt.req_init_data_ver = in xgpu_nv_send_access_requests()
194 if (adev->virt.req_init_data_ver < 1) in xgpu_nv_send_access_requests()
195 adev->virt.req_init_data_ver = 1; in xgpu_nv_send_access_requests()
201 adev->virt.fw_reserve.checksum_key = in xgpu_nv_send_access_requests()
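xgpu_nv_send_access_requests() pairs each outgoing request with the completion event it expects back; for IDH_REQ_GPU_INIT_DATA it additionally records a protocol version, clamping anything below 1 up to the legacy value (lines 187-195), and for init access it caches a firmware checksum key (line 201). A standalone sketch of that request/reply shape; the enum values and read_init_data_ver() are local assumptions modeled on the driver's naming, not the real definitions.

#include <errno.h>
#include <stdint.h>

enum idh_event { IDH_READY_TO_ACCESS_GPU, IDH_REQ_GPU_INIT_DATA_READY };
enum idh_request { IDH_REQ_GPU_INIT_ACCESS, IDH_REQ_GPU_INIT_DATA };

static uint32_t req_init_data_ver;

static void trans_msg_model(int req)         { (void)req; }          /* stub */
static int  poll_msg_model(enum idh_event e) { (void)e; return 0; }  /* stub */
static uint32_t read_init_data_ver(void)     { return 0; }           /* stub */

static int send_access_request_model(enum idh_request req)
{
	enum idh_event event;

	trans_msg_model(req);

	/* Each request type waits on its matching completion event. */
	event = (req == IDH_REQ_GPU_INIT_DATA) ? IDH_REQ_GPU_INIT_DATA_READY
					       : IDH_READY_TO_ACCESS_GPU;

	if (poll_msg_model(event))
		return -ETIME;

	if (req == IDH_REQ_GPU_INIT_DATA) {
		req_init_data_ver = read_init_data_ver();

		/* Hosts predating the versioned init-data protocol report
		 * 0; treat that as version 1 (the clamp on lines 194-195). */
		if (req_init_data_ver < 1)
			req_init_data_ver = 1;
	}
	return 0;
}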
209 static int xgpu_nv_request_reset(struct amdgpu_device *adev) in xgpu_nv_request_reset() argument
214 ret = xgpu_nv_send_access_requests(adev, IDH_REQ_GPU_RESET_ACCESS); in xgpu_nv_request_reset()
223 static int xgpu_nv_request_full_gpu_access(struct amdgpu_device *adev, in xgpu_nv_request_full_gpu_access() argument
229 return xgpu_nv_send_access_requests(adev, req); in xgpu_nv_request_full_gpu_access()
232 static int xgpu_nv_release_full_gpu_access(struct amdgpu_device *adev, in xgpu_nv_release_full_gpu_access() argument
239 r = xgpu_nv_send_access_requests(adev, req); in xgpu_nv_release_full_gpu_access()
244 static int xgpu_nv_request_init_data(struct amdgpu_device *adev) in xgpu_nv_request_init_data() argument
246 return xgpu_nv_send_access_requests(adev, IDH_REQ_GPU_INIT_DATA); in xgpu_nv_request_init_data()
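The four wrappers above reduce to choosing an IDH request code: reset maps to IDH_REQ_GPU_RESET_ACCESS, init-data to IDH_REQ_GPU_INIT_DATA, and the full-access request/release pair picks between init and fini variants based on the caller's init flag. A minimal sketch of that mapping; the REQ/REL access names below are assumptions following the driver's visible naming convention.

#include <stdbool.h>

enum idh_request {
	IDH_REQ_GPU_INIT_ACCESS,	/* assumed names, mirroring the */
	IDH_REL_GPU_INIT_ACCESS,	/* IDH_REQ_GPU_RESET_ACCESS and */
	IDH_REQ_GPU_FINI_ACCESS,	/* IDH_REQ_GPU_INIT_DATA codes  */
	IDH_REL_GPU_FINI_ACCESS,	/* that appear in the listing   */
};

static int send_access_request_model(enum idh_request req)
{
	(void)req;
	return 0;			/* stub for the model */
}

/* Request exclusive GPU access around driver init or fini. */
static int request_full_gpu_access_model(bool init)
{
	return send_access_request_model(init ? IDH_REQ_GPU_INIT_ACCESS
					      : IDH_REQ_GPU_FINI_ACCESS);
}

/* Hand the GPU back to the host once init or fini completes. */
static int release_full_gpu_access_model(bool init)
{
	return send_access_request_model(init ? IDH_REL_GPU_INIT_ACCESS
					      : IDH_REL_GPU_FINI_ACCESS);
}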
249 static int xgpu_nv_mailbox_ack_irq(struct amdgpu_device *adev, in xgpu_nv_mailbox_ack_irq() argument
257 static int xgpu_nv_set_mailbox_ack_irq(struct amdgpu_device *adev, in xgpu_nv_set_mailbox_ack_irq() argument
277 struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt); in xgpu_nv_mailbox_flr_work() local
284 if (atomic_cmpxchg(&adev->reset_domain->in_gpu_reset, 0, 1) != 0) in xgpu_nv_mailbox_flr_work()
287 down_write(&adev->reset_domain->sem); in xgpu_nv_mailbox_flr_work()
289 amdgpu_virt_fini_data_exchange(adev); in xgpu_nv_mailbox_flr_work()
291 xgpu_nv_mailbox_trans_msg(adev, IDH_READY_TO_RESET, 0, 0, 0); in xgpu_nv_mailbox_flr_work()
294 if (xgpu_nv_mailbox_peek_msg(adev) == IDH_FLR_NOTIFICATION_CMPL) in xgpu_nv_mailbox_flr_work()
302 atomic_set(&adev->reset_domain->in_gpu_reset, 0); in xgpu_nv_mailbox_flr_work()
303 up_write(&adev->reset_domain->sem); in xgpu_nv_mailbox_flr_work()
306 if (amdgpu_device_should_recover_gpu(adev) in xgpu_nv_mailbox_flr_work()
307 && (!amdgpu_device_has_job_running(adev) || in xgpu_nv_mailbox_flr_work()
308 adev->sdma_timeout == MAX_SCHEDULE_TIMEOUT || in xgpu_nv_mailbox_flr_work()
309 adev->gfx_timeout == MAX_SCHEDULE_TIMEOUT || in xgpu_nv_mailbox_flr_work()
310 adev->compute_timeout == MAX_SCHEDULE_TIMEOUT || in xgpu_nv_mailbox_flr_work()
311 adev->video_timeout == MAX_SCHEDULE_TIMEOUT)) { in xgpu_nv_mailbox_flr_work()
316 reset_context.reset_req_dev = adev; in xgpu_nv_mailbox_flr_work()
319 amdgpu_device_gpu_recover(adev, NULL, &reset_context); in xgpu_nv_mailbox_flr_work()
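The FLR (function-level reset) worker serializes against other reset paths with an atomic in_gpu_reset flag and the reset domain's rwsem (lines 284-287), tears down VF-to-PF data exchange, tells the host it is ready to be reset, then polls until the host reports IDH_FLR_NOTIFICATION_CMPL or a deadline passes; full recovery only runs afterwards if no job is running or every engine timeout is infinite (lines 306-311). A standalone model of the guard-then-wait skeleton using C11 atomics; the 5 s budget and all helpers are assumptions.

#include <stdatomic.h>
#include <stdbool.h>
#include <unistd.h>

static atomic_int in_gpu_reset;

static bool peek_flr_cmpl(void)  { static int n; return ++n > 2; }  /* stub */
static void notify_ready_to_reset(void) { }			    /* stub */
static void recover_gpu(void)	 { }				    /* stub */

static void flr_work_model(void)
{
	int timeout_ms = 5000;	/* assumed budget for the host-side FLR */
	int expected = 0;

	/* Only one reset may be in flight; bail out if another path won. */
	if (!atomic_compare_exchange_strong(&in_gpu_reset, &expected, 1))
		return;

	notify_ready_to_reset();	/* IDH_READY_TO_RESET to the host */

	/* Wait for the host to report that the FLR completed. */
	do {
		if (peek_flr_cmpl())	/* saw IDH_FLR_NOTIFICATION_CMPL */
			break;
		usleep(10000);		/* 10 ms between mailbox peeks */
		timeout_ms -= 10;
	} while (timeout_ms > 0);

	atomic_store(&in_gpu_reset, 0);	/* drop the guard */

	recover_gpu();	/* gated by the job/timeout checks in the driver */
}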
323 static int xgpu_nv_set_mailbox_rcv_irq(struct amdgpu_device *adev, in xgpu_nv_set_mailbox_rcv_irq() argument
340 static int xgpu_nv_mailbox_rcv_irq(struct amdgpu_device *adev, in xgpu_nv_mailbox_rcv_irq() argument
344 enum idh_event event = xgpu_nv_mailbox_peek_msg(adev); in xgpu_nv_mailbox_rcv_irq()
348 if (amdgpu_sriov_runtime(adev) && !amdgpu_in_reset(adev)) in xgpu_nv_mailbox_rcv_irq()
349 WARN_ONCE(!amdgpu_reset_domain_schedule(adev->reset_domain, in xgpu_nv_mailbox_rcv_irq()
350 &adev->virt.flr_work), in xgpu_nv_mailbox_rcv_irq()
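The receive interrupt handler stays thin: it peeks at the pending event and, for an FLR notification, defers the real work to the flr_work item via amdgpu_reset_domain_schedule(), but only when SR-IOV runtime mode is active and no reset is already underway (lines 348-350), warning once if scheduling fails. A sketch of that top-half/bottom-half split; queue_work_model() and the stubbed predicates are stand-ins.

#include <stdbool.h>
#include <stdio.h>

enum idh_event { IDH_FLR_NOTIFICATION = 1, IDH_EVENT_OTHER };

static bool sriov_runtime(void) { return true; }   /* stub for the model */
static bool in_reset(void)	{ return false; }  /* stub for the model */
static bool queue_work_model(void (*fn)(void)) { fn(); return true; }

static void flr_work_model(void) { /* see the FLR sketch above */ }

/* Top half: classify the mailbox event and defer FLR handling. */
static int rcv_irq_model(enum idh_event event)
{
	if (event != IDH_FLR_NOTIFICATION)
		return 0;	/* other events are consumed elsewhere */

	/* Schedule the worker only if runtime services are up and no
	 * reset already owns the device; warn if the queue refuses. */
	if (sriov_runtime() && !in_reset() &&
	    !queue_work_model(flr_work_model))
		fprintf(stderr, "could not queue FLR work\n");

	return 0;
}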
378 void xgpu_nv_mailbox_set_irq_funcs(struct amdgpu_device *adev) in xgpu_nv_mailbox_set_irq_funcs() argument
380 adev->virt.ack_irq.num_types = 1; in xgpu_nv_mailbox_set_irq_funcs()
381 adev->virt.ack_irq.funcs = &xgpu_nv_mailbox_ack_irq_funcs; in xgpu_nv_mailbox_set_irq_funcs()
382 adev->virt.rcv_irq.num_types = 1; in xgpu_nv_mailbox_set_irq_funcs()
383 adev->virt.rcv_irq.funcs = &xgpu_nv_mailbox_rcv_irq_funcs; in xgpu_nv_mailbox_set_irq_funcs()
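xgpu_nv_mailbox_set_irq_funcs() binds each amdgpu_irq_src to a static ops table, the common kernel idiom of a const struct of function pointers per interrupt source. An illustrative sketch of the idiom; the types and fields below are local stand-ins, not the amdgpu definitions.

#include <stdbool.h>

struct irq_src_funcs {
	int (*set)(void *dev, unsigned int type, bool enabled);
	int (*process)(void *dev, const void *entry);
};

struct irq_src {
	unsigned int num_types;
	const struct irq_src_funcs *funcs;
};

static int ack_set(void *dev, unsigned int type, bool enabled)
{
	(void)dev; (void)type; (void)enabled;
	return 0;			/* enable/disable the source */
}

static int ack_process(void *dev, const void *entry)
{
	(void)dev; (void)entry;
	return 0;			/* handle one pending interrupt */
}

static const struct irq_src_funcs ack_irq_funcs = {
	.set	 = ack_set,
	.process = ack_process,
};

/* Mirrors lines 380-383: one type per source, ops bound statically. */
static void set_irq_funcs_model(struct irq_src *ack_irq)
{
	ack_irq->num_types = 1;
	ack_irq->funcs = &ack_irq_funcs;
}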
386 int xgpu_nv_mailbox_add_irq_id(struct amdgpu_device *adev) in xgpu_nv_mailbox_add_irq_id() argument
390 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq); in xgpu_nv_mailbox_add_irq_id()
394 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq); in xgpu_nv_mailbox_add_irq_id()
396 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_add_irq_id()
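xgpu_nv_mailbox_add_irq_id() registers two interrupt sources on the BIF client, src id 135 for mailbox receive and 138 for the write ack, and if the second registration fails it releases the first before returning. xgpu_nv_mailbox_get_irq() (lines 407-416) repeats the same rollback shape with amdgpu_irq_get()/amdgpu_irq_put() and finally INIT_WORKs the FLR handler, paired with xgpu_nv_mailbox_put_irq() on teardown. A sketch of the unwind-on-error shape; add_id() and put_irq() are stand-ins for the amdgpu helpers.

#define SRC_ID_RCV 135	/* mailbox message received (line 390) */
#define SRC_ID_ACK 138	/* mailbox write acknowledged (line 394) */

static int  add_id(int src_id)  { (void)src_id; return 0; }	/* stub */
static void put_irq(int src_id) { (void)src_id; }		/* stub */

static int add_irq_ids_model(void)
{
	int r;

	r = add_id(SRC_ID_RCV);
	if (r)
		return r;

	r = add_id(SRC_ID_ACK);
	if (r) {
		put_irq(SRC_ID_RCV);	/* roll back the first source */
		return r;
	}
	return 0;
}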
403 int xgpu_nv_mailbox_get_irq(struct amdgpu_device *adev) in xgpu_nv_mailbox_get_irq() argument
407 r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_get_irq()
410 r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0); in xgpu_nv_mailbox_get_irq()
412 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_get_irq()
416 INIT_WORK(&adev->virt.flr_work, xgpu_nv_mailbox_flr_work); in xgpu_nv_mailbox_get_irq()
421 void xgpu_nv_mailbox_put_irq(struct amdgpu_device *adev) in xgpu_nv_mailbox_put_irq() argument
423 amdgpu_irq_put(adev, &adev->virt.ack_irq, 0); in xgpu_nv_mailbox_put_irq()
424 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_put_irq()
427 static void xgpu_nv_ras_poison_handler(struct amdgpu_device *adev) in xgpu_nv_ras_poison_handler() argument
429 xgpu_nv_send_access_requests(adev, IDH_RAS_POISON); in xgpu_nv_ras_poison_handler()