Lines matching refs:kfd_process (each entry: source line number, matching statement, enclosing function)
181 struct kfd_process *p; in svm_range_dma_map()
185 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_dma_map()
231 struct kfd_process *p; in svm_range_free_dma_mappings()
234 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_free_dma_mappings()
282 struct kfd_process *p; in svm_range_new()
303 p = container_of(svms, struct kfd_process, svms); in svm_range_new()
468 struct kfd_process *p; in svm_range_vram_node_new()
472 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_vram_node_new()
561 struct kfd_process *p; in svm_range_get_adev_by_id()
564 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_get_adev_by_id()
583 struct kfd_process *p; in svm_range_get_pdd_by_adev()
587 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_get_pdd_by_adev()
608 svm_range_check_attr(struct kfd_process *p, in svm_range_check_attr()
657 svm_range_apply_attrs(struct kfd_process *p, struct svm_range *prange, in svm_range_apply_attrs()
992 svm_range_split_by_granularity(struct kfd_process *p, struct mm_struct *mm, in svm_range_split_by_granularity()
1126 struct kfd_process *p; in svm_range_unmap_from_gpus()
1132 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_unmap_from_gpus()
1236 struct kfd_process *p; in svm_range_map_to_gpu()
1238 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_map_to_gpu()
1254 struct kfd_process *p; in svm_range_map_to_gpus()
1264 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_map_to_gpus()
1306 struct kfd_process *process; (struct member, no enclosing function)
1379 static void *kfd_svm_page_owner(struct kfd_process *p, int32_t gpuidx) in kfd_svm_page_owner()
1420 struct kfd_process *p; in svm_range_validate_and_map()
1425 ctx.process = container_of(prange->svms, struct kfd_process, svms); in svm_range_validate_and_map()
1468 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_validate_and_map()
1576 struct kfd_process *p; in svm_range_restore_work()
1592 p = container_of(svms, struct kfd_process, svms); in svm_range_restore_work()
1678 struct kfd_process *p; in svm_range_evict()
1681 p = container_of(svms, struct kfd_process, svms); in svm_range_evict()
1966 struct kfd_process *p; in svm_range_drain_retry_fault()
1969 p = container_of(svms, struct kfd_process, svms); in svm_range_drain_retry_fault()
2113 struct kfd_process *p; in svm_range_unmap_from_cpu()
2280 struct kfd_process *p; in svm_range_best_restore_location()
2284 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_best_restore_location()
2312 svm_range_get_range_boundaries(struct kfd_process *p, int64_t addr, in svm_range_get_range_boundaries()
2362 struct kfd_process *p, in svm_range_create_unregistered_range()
2429 svm_range_count_fault(struct amdgpu_device *adev, struct kfd_process *p, in svm_range_count_fault()
2482 struct kfd_process *p; in svm_range_restore_pages()
2628 void svm_range_list_fini(struct kfd_process *p) in svm_range_list_fini()
2649 int svm_range_list_init(struct kfd_process *p) in svm_range_list_init()
2726 svm_range_add(struct kfd_process *p, uint64_t start, uint64_t size, in svm_range_add()
2792 struct kfd_process *p; in svm_range_best_prefetch_location()
2795 p = container_of(prange->svms, struct kfd_process, svms); in svm_range_best_prefetch_location()
2929 struct kfd_process *p; in svm_range_evict_svm_bo_worker()
2939 p = container_of(svm_bo->svms, struct kfd_process, svms); in svm_range_evict_svm_bo_worker()
2979 svm_range_set_attr(struct kfd_process *p, uint64_t start, uint64_t size, in svm_range_set_attr()
3087 svm_range_get_attr(struct kfd_process *p, uint64_t start, uint64_t size, in svm_range_get_attr()
3263 svm_ioctl(struct kfd_process *p, enum kfd_ioctl_svm_op op, uint64_t start, in svm_ioctl()
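
Nearly every hit above follows the same two-step pattern: declare a local struct kfd_process *p, then recover it from an svm_range_list pointer with p = container_of(..., struct kfd_process, svms). This works because the per-process SVM state is embedded in struct kfd_process as the svms member, so the address of the embedded list fixes the address of its owner. The following is a minimal, standalone userspace sketch of that idiom; the struct bodies are placeholders (the real kfd_process, svm_range_list, and svm_range definitions live in the amdkfd driver headers and carry many more fields), and container_of() here is the standard offsetof-based macro the kernel provides.

#include <stddef.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's container_of() helper. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct svm_range_list {
	int dummy;	/* placeholder; the real struct holds the interval tree, lists, locks */
};

struct kfd_process {
	int pasid;			/* placeholder field */
	struct svm_range_list svms;	/* embedded, so container_of() can walk back to the owner */
};

struct svm_range {
	struct svm_range_list *svms;	/* back-pointer to the per-process list */
};

/* Mirrors the recurring pattern in e.g. svm_range_dma_map(): range -> svms -> process. */
static struct kfd_process *svm_range_to_process(struct svm_range *prange)
{
	return container_of(prange->svms, struct kfd_process, svms);
}

int main(void)
{
	struct kfd_process proc = { .pasid = 42 };
	struct svm_range prange = { .svms = &proc.svms };

	/* Recovers &proc purely from the address of its embedded svms member. */
	printf("pasid = %d\n", svm_range_to_process(&prange)->pasid);
	return 0;
}

The payoff visible in the listing is that functions such as svm_range_dma_map(), svm_range_evict(), and svm_range_validate_and_map() only need a prange or svms pointer as an argument; the owning kfd_process never has to be threaded through every call.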