Lines matching refs: adev in the amdgpu IH (interrupt handler) code

37 static int amdgpu_ih_ring_alloc(struct amdgpu_device *adev)  in amdgpu_ih_ring_alloc()  argument
42 if (adev->irq.ih.ring_obj == NULL) { in amdgpu_ih_ring_alloc()
43 r = amdgpu_bo_create_kernel(adev, adev->irq.ih.ring_size, in amdgpu_ih_ring_alloc()
45 &adev->irq.ih.ring_obj, in amdgpu_ih_ring_alloc()
46 &adev->irq.ih.gpu_addr, in amdgpu_ih_ring_alloc()
47 (void **)&adev->irq.ih.ring); in amdgpu_ih_ring_alloc()
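The fragments above come from amdgpu_ih_ring_alloc(), which lazily backs the IH ring with a kernel BO the first time it is needed. A minimal reconstruction follows; only the lines that reference adev appear in the listing, so the arguments on the elided line, the error path, and the driver-internal "amdgpu.h" header assumed for the types are filled in as assumptions.

/* Sketch assembled from the listing above; the alignment/domain arguments
 * and the error handling are assumed, not taken from the listing. */
#include "amdgpu.h"

static int amdgpu_ih_ring_alloc(struct amdgpu_device *adev)
{
	int r;

	/* Allocate the ring buffer only once. */
	if (adev->irq.ih.ring_obj == NULL) {
		r = amdgpu_bo_create_kernel(adev, adev->irq.ih.ring_size,
					    PAGE_SIZE,             /* assumed alignment */
					    AMDGPU_GEM_DOMAIN_GTT, /* assumed domain */
					    &adev->irq.ih.ring_obj,
					    &adev->irq.ih.gpu_addr,
					    (void **)&adev->irq.ih.ring);
		if (r)
			return r;
	}
	return 0;
}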
65 int amdgpu_ih_ring_init(struct amdgpu_device *adev, unsigned ring_size, in amdgpu_ih_ring_init() argument
74 adev->irq.ih.ring_size = ring_size; in amdgpu_ih_ring_init()
75 adev->irq.ih.ptr_mask = adev->irq.ih.ring_size - 1; in amdgpu_ih_ring_init()
76 adev->irq.ih.rptr = 0; in amdgpu_ih_ring_init()
77 adev->irq.ih.use_bus_addr = use_bus_addr; in amdgpu_ih_ring_init()
79 if (adev->irq.ih.use_bus_addr) { in amdgpu_ih_ring_init()
80 if (!adev->irq.ih.ring) { in amdgpu_ih_ring_init()
84 adev->irq.ih.ring = pci_alloc_consistent(adev->pdev, in amdgpu_ih_ring_init()
85 adev->irq.ih.ring_size + 8, in amdgpu_ih_ring_init()
86 &adev->irq.ih.rb_dma_addr); in amdgpu_ih_ring_init()
87 if (adev->irq.ih.ring == NULL) in amdgpu_ih_ring_init()
89 memset((void *)adev->irq.ih.ring, 0, adev->irq.ih.ring_size + 8); in amdgpu_ih_ring_init()
90 adev->irq.ih.wptr_offs = (adev->irq.ih.ring_size / 4) + 0; in amdgpu_ih_ring_init()
91 adev->irq.ih.rptr_offs = (adev->irq.ih.ring_size / 4) + 1; in amdgpu_ih_ring_init()
95 r = amdgpu_device_wb_get(adev, &adev->irq.ih.wptr_offs); in amdgpu_ih_ring_init()
97 dev_err(adev->dev, "(%d) ih wptr_offs wb alloc failed\n", r); in amdgpu_ih_ring_init()
101 r = amdgpu_device_wb_get(adev, &adev->irq.ih.rptr_offs); in amdgpu_ih_ring_init()
103 amdgpu_device_wb_free(adev, adev->irq.ih.wptr_offs); in amdgpu_ih_ring_init()
104 dev_err(adev->dev, "(%d) ih rptr_offs wb alloc failed\n", r); in amdgpu_ih_ring_init()
108 return amdgpu_ih_ring_alloc(adev); in amdgpu_ih_ring_init()
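Taken together, the amdgpu_ih_ring_init() lines show two setup paths: a bus-address ring allocated with pci_alloc_consistent(), with 8 extra bytes at the end acting as wptr/rptr shadows (dword offsets ring_size/4 and ring_size/4 + 1), or two device writeback slots obtained via amdgpu_device_wb_get() followed by the BO allocation. The sketch below stitches the fragments into one flow; the use_bus_addr parameter type, the power-of-two rounding of ring_size (needed for ptr_mask to work as a wrap mask), and the control flow between fragments are assumptions.

int amdgpu_ih_ring_init(struct amdgpu_device *adev, unsigned ring_size,
			bool use_bus_addr)	/* bool inferred from usage */
{
	int r;

	/* ptr_mask only wraps correctly for a power-of-two ring size;
	 * this rounding step is assumed (not visible in the listing). */
	ring_size = roundup_pow_of_two(ring_size);

	adev->irq.ih.ring_size = ring_size;
	adev->irq.ih.ptr_mask = adev->irq.ih.ring_size - 1;
	adev->irq.ih.rptr = 0;
	adev->irq.ih.use_bus_addr = use_bus_addr;

	if (adev->irq.ih.use_bus_addr) {
		if (!adev->irq.ih.ring) {
			/* 8 extra bytes hold the wptr/rptr shadow dwords
			 * right behind the ring itself. */
			adev->irq.ih.ring = pci_alloc_consistent(adev->pdev,
						adev->irq.ih.ring_size + 8,
						&adev->irq.ih.rb_dma_addr);
			if (adev->irq.ih.ring == NULL)
				return -ENOMEM;
			memset((void *)adev->irq.ih.ring, 0,
			       adev->irq.ih.ring_size + 8);
			adev->irq.ih.wptr_offs = (adev->irq.ih.ring_size / 4) + 0;
			adev->irq.ih.rptr_offs = (adev->irq.ih.ring_size / 4) + 1;
		}
		return 0;
	}

	/* Otherwise use two slots in the device-wide writeback page. */
	r = amdgpu_device_wb_get(adev, &adev->irq.ih.wptr_offs);
	if (r) {
		dev_err(adev->dev, "(%d) ih wptr_offs wb alloc failed\n", r);
		return r;
	}
	r = amdgpu_device_wb_get(adev, &adev->irq.ih.rptr_offs);
	if (r) {
		amdgpu_device_wb_free(adev, adev->irq.ih.wptr_offs);
		dev_err(adev->dev, "(%d) ih rptr_offs wb alloc failed\n", r);
		return r;
	}

	return amdgpu_ih_ring_alloc(adev);
}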
120 void amdgpu_ih_ring_fini(struct amdgpu_device *adev) in amdgpu_ih_ring_fini() argument
122 if (adev->irq.ih.use_bus_addr) { in amdgpu_ih_ring_fini()
123 if (adev->irq.ih.ring) { in amdgpu_ih_ring_fini()
127 pci_free_consistent(adev->pdev, adev->irq.ih.ring_size + 8, in amdgpu_ih_ring_fini()
128 (void *)adev->irq.ih.ring, in amdgpu_ih_ring_fini()
129 adev->irq.ih.rb_dma_addr); in amdgpu_ih_ring_fini()
130 adev->irq.ih.ring = NULL; in amdgpu_ih_ring_fini()
133 amdgpu_bo_free_kernel(&adev->irq.ih.ring_obj, in amdgpu_ih_ring_fini()
134 &adev->irq.ih.gpu_addr, in amdgpu_ih_ring_fini()
135 (void **)&adev->irq.ih.ring); in amdgpu_ih_ring_fini()
136 amdgpu_device_wb_free(adev, adev->irq.ih.wptr_offs); in amdgpu_ih_ring_fini()
137 amdgpu_device_wb_free(adev, adev->irq.ih.rptr_offs); in amdgpu_ih_ring_fini()
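amdgpu_ih_ring_fini() mirrors the init path: the coherent allocation (ring plus the 8 shadow bytes) is returned with pci_free_consistent() in the bus-address case, otherwise the kernel BO and the two writeback slots are released. The branching between the two halves is assumed; only the adev-referencing lines are shown above.

void amdgpu_ih_ring_fini(struct amdgpu_device *adev)
{
	if (adev->irq.ih.use_bus_addr) {
		if (adev->irq.ih.ring) {
			/* Free the ring plus the 8 shadow bytes allocated
			 * in amdgpu_ih_ring_init(). */
			pci_free_consistent(adev->pdev,
					    adev->irq.ih.ring_size + 8,
					    (void *)adev->irq.ih.ring,
					    adev->irq.ih.rb_dma_addr);
			adev->irq.ih.ring = NULL;
		}
	} else {
		amdgpu_bo_free_kernel(&adev->irq.ih.ring_obj,
				      &adev->irq.ih.gpu_addr,
				      (void **)&adev->irq.ih.ring);
		amdgpu_device_wb_free(adev, adev->irq.ih.wptr_offs);
		amdgpu_device_wb_free(adev, adev->irq.ih.rptr_offs);
	}
}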
149 int amdgpu_ih_process(struct amdgpu_device *adev) in amdgpu_ih_process() argument
154 if (!adev->irq.ih.enabled || adev->shutdown) in amdgpu_ih_process()
157 wptr = amdgpu_ih_get_wptr(adev); in amdgpu_ih_process()
161 if (atomic_xchg(&adev->irq.ih.lock, 1)) in amdgpu_ih_process()
164 DRM_DEBUG("%s: rptr %d, wptr %d\n", __func__, adev->irq.ih.rptr, wptr); in amdgpu_ih_process()
169 while (adev->irq.ih.rptr != wptr) { in amdgpu_ih_process()
170 u32 ring_index = adev->irq.ih.rptr >> 2; in amdgpu_ih_process()
173 if (!amdgpu_ih_prescreen_iv(adev)) { in amdgpu_ih_process()
174 adev->irq.ih.rptr &= adev->irq.ih.ptr_mask; in amdgpu_ih_process()
179 amdgpu_amdkfd_interrupt(adev, in amdgpu_ih_process()
180 (const void *) &adev->irq.ih.ring[ring_index]); in amdgpu_ih_process()
183 &adev->irq.ih.ring[ring_index]; in amdgpu_ih_process()
184 amdgpu_ih_decode_iv(adev, &entry); in amdgpu_ih_process()
185 adev->irq.ih.rptr &= adev->irq.ih.ptr_mask; in amdgpu_ih_process()
187 amdgpu_irq_dispatch(adev, &entry); in amdgpu_ih_process()
189 amdgpu_ih_set_rptr(adev); in amdgpu_ih_process()
190 atomic_set(&adev->irq.ih.lock, 0); in amdgpu_ih_process()
193 wptr = amdgpu_ih_get_wptr(adev); in amdgpu_ih_process()
194 if (wptr != adev->irq.ih.rptr) in amdgpu_ih_process()
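The amdgpu_ih_process() fragments form the IH drain loop: read the hardware write pointer, take the single-owner lock with atomic_xchg(), then walk rptr toward wptr, prescreening each entry, forwarding the raw entry to amdkfd, decoding and dispatching it, before publishing the new rptr and rechecking wptr for entries that arrived in the meantime. The sketch below fills in the non-adev lines (declarations, the restart label, return values, the read barrier) as assumptions; note that the IP-specific prescreen/decode callbacks are what actually advance ih.rptr, while the generic loop only wraps it with ptr_mask.

int amdgpu_ih_process(struct amdgpu_device *adev)
{
	struct amdgpu_iv_entry entry;
	u32 wptr;

	if (!adev->irq.ih.enabled || adev->shutdown)
		return IRQ_NONE;

	wptr = amdgpu_ih_get_wptr(adev);

restart_ih:
	/* Only one instance may drain the ring at a time. */
	if (atomic_xchg(&adev->irq.ih.lock, 1))
		return IRQ_NONE;

	DRM_DEBUG("%s: rptr %d, wptr %d\n", __func__, adev->irq.ih.rptr, wptr);

	/* Order reads of ring entries after the read of wptr (assumed). */
	rmb();

	while (adev->irq.ih.rptr != wptr) {
		u32 ring_index = adev->irq.ih.rptr >> 2;

		/* Drop spurious or filtered entries early; the callback
		 * advances rptr past the skipped entry. */
		if (!amdgpu_ih_prescreen_iv(adev)) {
			adev->irq.ih.rptr &= adev->irq.ih.ptr_mask;
			continue;
		}

		/* Hand the raw entry to amdkfd before decoding it. */
		amdgpu_amdkfd_interrupt(adev,
			(const void *) &adev->irq.ih.ring[ring_index]);

		entry.iv_entry = (const uint32_t *)	/* left-hand side assumed */
			&adev->irq.ih.ring[ring_index];
		amdgpu_ih_decode_iv(adev, &entry);
		adev->irq.ih.rptr &= adev->irq.ih.ptr_mask;

		amdgpu_irq_dispatch(adev, &entry);
	}
	amdgpu_ih_set_rptr(adev);
	atomic_set(&adev->irq.ih.lock, 0);

	/* Entries may have landed while we were processing; drain again. */
	wptr = amdgpu_ih_get_wptr(adev);
	if (wptr != adev->irq.ih.rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}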
218 int amdgpu_ih_add_fault(struct amdgpu_device *adev, u64 key) in amdgpu_ih_add_fault() argument
223 if (WARN_ON_ONCE(!adev->irq.ih.faults)) in amdgpu_ih_add_fault()
229 spin_lock_irqsave(&adev->irq.ih.faults->lock, flags); in amdgpu_ih_add_fault()
232 if (adev->irq.ih.faults->count >= (1 << (AMDGPU_PAGEFAULT_HASH_BITS-1))) in amdgpu_ih_add_fault()
235 r = chash_table_copy_in(&adev->irq.ih.faults->hash, key, NULL); in amdgpu_ih_add_fault()
237 adev->irq.ih.faults->count++; in amdgpu_ih_add_fault()
243 spin_unlock_irqrestore(&adev->irq.ih.faults->lock, flags); in amdgpu_ih_add_fault()
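amdgpu_ih_add_fault() records a retry page-fault key in the chash table under the faults spinlock, capping the fill level at half the hash capacity (1 << (AMDGPU_PAGEFAULT_HASH_BITS - 1)) and counting only genuinely new keys. The default return value, the treatment of an already-present key as benign, and the unlock label are assumptions filled in around the listed lines.

int amdgpu_ih_add_fault(struct amdgpu_device *adev, u64 key)
{
	unsigned long flags;
	int r = -ENOSPC;	/* assumed default when the table is full */

	/* The fault hash is only allocated on ASICs that need retry-fault
	 * filtering; treat its absence as a programming error. */
	if (WARN_ON_ONCE(!adev->irq.ih.faults))
		return r;

	spin_lock_irqsave(&adev->irq.ih.faults->lock, flags);

	/* Keep the closed hash at most half full. */
	if (adev->irq.ih.faults->count >= (1 << (AMDGPU_PAGEFAULT_HASH_BITS - 1)))
		goto unlock_out;

	r = chash_table_copy_in(&adev->irq.ih.faults->hash, key, NULL);
	if (!r)
		adev->irq.ih.faults->count++;

	/* A positive return means the key was already present; treat
	 * that as success (assumed). */
	r = min(r, 0);

unlock_out:
	spin_unlock_irqrestore(&adev->irq.ih.faults->lock, flags);
	return r;
}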
257 void amdgpu_ih_clear_fault(struct amdgpu_device *adev, u64 key) in amdgpu_ih_clear_fault() argument
262 if (!adev->irq.ih.faults) in amdgpu_ih_clear_fault()
265 spin_lock_irqsave(&adev->irq.ih.faults->lock, flags); in amdgpu_ih_clear_fault()
267 r = chash_table_remove(&adev->irq.ih.faults->hash, key, NULL); in amdgpu_ih_clear_fault()
269 adev->irq.ih.faults->count--; in amdgpu_ih_clear_fault()
270 WARN_ON_ONCE(adev->irq.ih.faults->count < 0); in amdgpu_ih_clear_fault()
273 spin_unlock_irqrestore(&adev->irq.ih.faults->lock, flags); in amdgpu_ih_clear_fault()
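amdgpu_ih_clear_fault() is the counterpart: once the fault has been handled it removes the key from the hash and decrements the count, warning if the bookkeeping ever goes negative. The guard around the decrement is assumed from the return value of chash_table_remove().

void amdgpu_ih_clear_fault(struct amdgpu_device *adev, u64 key)
{
	unsigned long flags;
	int r;

	if (!adev->irq.ih.faults)
		return;

	spin_lock_irqsave(&adev->irq.ih.faults->lock, flags);

	r = chash_table_remove(&adev->irq.ih.faults->hash, key, NULL);
	if (!WARN_ON_ONCE(r < 0)) {
		adev->irq.ih.faults->count--;
		WARN_ON_ONCE(adev->irq.ih.faults->count < 0);
	}

	spin_unlock_irqrestore(&adev->irq.ih.faults->lock, flags);
}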