
Searched refs:bos (Results 1 – 25 of 51) sorted by relevance

/Linux-v5.4/drivers/gpu/drm/etnaviv/
etnaviv_gem_submit.c
35 size_t sz = size_vstruct(nr_bos, sizeof(submit->bos[0]), sizeof(*submit)); in submit_create()
74 submit->bos[i].flags = bo->flags; in submit_lookup_objects()
81 submit->bos[i].va = bo->presumed; in submit_lookup_objects()
101 submit->bos[i].obj = to_etnaviv_bo(obj); in submit_lookup_objects()
113 if (submit->bos[i].flags & BO_LOCKED) { in submit_unlock_object()
114 struct drm_gem_object *obj = &submit->bos[i].obj->base; in submit_unlock_object()
117 submit->bos[i].flags &= ~BO_LOCKED; in submit_unlock_object()
128 struct drm_gem_object *obj = &submit->bos[i].obj->base; in submit_lock_objects()
135 if (!(submit->bos[i].flags & BO_LOCKED)) { in submit_lock_objects()
143 submit->bos[i].flags |= BO_LOCKED; in submit_lock_objects()
[all …]
etnaviv_dump.c
141 obj = submit->bos[i].obj; in etnaviv_core_dump()
201 obj = submit->bos[i].obj; in etnaviv_core_dump()
202 vram = submit->bos[i].mapping; in etnaviv_core_dump()
/Linux-v5.4/drivers/gpu/drm/msm/
msm_gem_submit.c
33 uint64_t sz = struct_size(submit, bos, nr_bos) + in submit_create()
47 submit->cmd = (void *)&submit->bos[nr_bos]; in submit_create()
81 u64_to_user_ptr(args->bos + (i * sizeof(submit_bo))); in submit_lookup_objects()
86 submit->bos[i].flags = 0; in submit_lookup_objects()
105 submit->bos[i].handle = submit_bo.handle; in submit_lookup_objects()
106 submit->bos[i].flags = submit_bo.flags; in submit_lookup_objects()
108 submit->bos[i].iova = submit_bo.presumed; in submit_lookup_objects()
120 obj = idr_find(&file->object_idr, submit->bos[i].handle); in submit_lookup_objects()
122 DRM_ERROR("invalid handle %u at index %u\n", submit->bos[i].handle, i); in submit_lookup_objects()
131 submit->bos[i].handle, i); in submit_lookup_objects()
[all …]
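
The msm_gem_submit.c hits above show submit_create() sizing a single allocation with struct_size(submit, bos, nr_bos) and then pointing submit->cmd at the memory just past the bos[] flexible array; the etnaviv submit_create() does the same with size_vstruct(). Below is a minimal userland C sketch of that single-allocation layout. The struct fields and names are simplified stand-ins for illustration, not the kernel definitions, and the overflow checking that struct_size() provides is omitted.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-ins for the kernel structs; the real msm submit
 * structures carry many more fields. */
struct submit_bo  { uint32_t handle; uint32_t flags; uint64_t iova; };
struct submit_cmd { uint32_t type;   uint32_t size;  uint64_t iova; };

struct submit {
	unsigned int nr_bos;
	unsigned int nr_cmds;
	struct submit_cmd *cmd;   /* points into the same allocation */
	struct submit_bo bos[];   /* flexible array member */
};

/* Userland equivalent of struct_size(submit, bos, nr_bos) plus room for
 * the cmd array; the kernel helper also guards the multiply against
 * overflow, which this sketch does not. */
static struct submit *submit_create(unsigned int nr_bos, unsigned int nr_cmds)
{
	size_t sz = sizeof(struct submit)
		  + nr_bos  * sizeof(struct submit_bo)
		  + nr_cmds * sizeof(struct submit_cmd);
	struct submit *submit = calloc(1, sz);

	if (!submit)
		return NULL;

	submit->nr_bos  = nr_bos;
	submit->nr_cmds = nr_cmds;
	/* The cmd array starts right after the last bos[] entry. */
	submit->cmd = (struct submit_cmd *)&submit->bos[nr_bos];
	return submit;
}

int main(void)
{
	struct submit *s = submit_create(4, 2);

	if (!s)
		return 1;
	printf("bos[] at %p, cmd[] at %p\n", (void *)s->bos, (void *)s->cmd);
	free(s);
	return 0;
}

Keeping the bo table and the cmd table in one allocation means setup and teardown are a single alloc/free pair, which is the point of the size arithmetic in submit_create().
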
msm_fb.c
25 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos);
104 struct drm_gem_object *bos[4] = {0}; in msm_framebuffer_create() local
109 bos[i] = drm_gem_object_lookup(file, mode_cmd->handles[i]); in msm_framebuffer_create()
110 if (!bos[i]) { in msm_framebuffer_create()
116 fb = msm_framebuffer_init(dev, mode_cmd, bos); in msm_framebuffer_create()
126 drm_gem_object_put_unlocked(bos[i]); in msm_framebuffer_create()
131 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos) in msm_framebuffer_init() argument
180 if (bos[i]->size < min_size) { in msm_framebuffer_init()
185 msm_fb->base.obj[i] = bos[i]; in msm_framebuffer_init()
msm_rd.c
303 struct msm_gem_object *obj = submit->bos[idx].obj; in snapshot_buf()
308 offset = iova - submit->bos[idx].iova; in snapshot_buf()
310 iova = submit->bos[idx].iova; in snapshot_buf()
322 if (!(submit->bos[idx].flags & MSM_SUBMIT_BO_READ)) in snapshot_buf()
339 return rd_full || (submit->bos[idx].flags & MSM_SUBMIT_BO_DUMP); in should_dump()
msm_gpu.c
307 struct msm_gpu_state_bo *state_bo = &state->bos[state->nr_bos]; in msm_gpu_crashstate_get_bo()
359 state->bos = kcalloc(submit->nr_cmds, in msm_gpu_crashstate_capture()
362 for (i = 0; state->bos && i < submit->nr_cmds; i++) { in msm_gpu_crashstate_capture()
365 msm_gpu_crashstate_get_bo(state, submit->bos[idx].obj, in msm_gpu_crashstate_capture()
366 submit->bos[idx].iova, submit->bos[idx].flags); in msm_gpu_crashstate_capture()
674 struct msm_gem_object *msm_obj = submit->bos[i].obj; in retire_submit()
751 struct msm_gem_object *msm_obj = submit->bos[i].obj; in msm_gpu_submit()
763 if (submit->bos[i].flags & MSM_SUBMIT_BO_WRITE) in msm_gpu_submit()
765 else if (submit->bos[i].flags & MSM_SUBMIT_BO_READ) in msm_gpu_submit()
/Linux-v5.4/drivers/gpu/drm/lima/
lima_gem.c
151 static int lima_gem_lock_bos(struct lima_bo **bos, u32 nr_bos, in lima_gem_lock_bos() argument
165 ret = ww_mutex_lock_interruptible(&bos[i]->gem.resv->lock, ctx); in lima_gem_lock_bos()
177 ww_mutex_unlock(&bos[i]->gem.resv->lock); in lima_gem_lock_bos()
180 ww_mutex_unlock(&bos[slow_locked]->gem.resv->lock); in lima_gem_lock_bos()
185 &bos[contended]->gem.resv->lock, ctx); in lima_gem_lock_bos()
196 static void lima_gem_unlock_bos(struct lima_bo **bos, u32 nr_bos, in lima_gem_unlock_bos() argument
202 ww_mutex_unlock(&bos[i]->gem.resv->lock); in lima_gem_unlock_bos()
239 struct lima_bo **bos = submit->lbos; in lima_gem_submit() local
251 obj = drm_gem_object_lookup(file, submit->bos[i].handle); in lima_gem_submit()
268 bos[i] = bo; in lima_gem_submit()
[all …]
lima_sched.c
110 struct lima_bo **bos, int num_bos, in lima_sched_task_init() argument
115 task->bos = kmemdup(bos, sizeof(*bos) * num_bos, GFP_KERNEL); in lima_sched_task_init()
116 if (!task->bos) in lima_sched_task_init()
120 drm_gem_object_get(&bos[i]->gem); in lima_sched_task_init()
124 kfree(task->bos); in lima_sched_task_init()
149 if (task->bos) { in lima_sched_task_fini()
151 drm_gem_object_put_unlocked(&task->bos[i]->gem); in lima_sched_task_fini()
152 kfree(task->bos); in lima_sched_task_fini()
302 struct lima_bo **bos = task->bos; in lima_sched_free_job() local
308 lima_vm_bo_del(vm, bos[i]); in lima_sched_free_job()
lima_drv.c
93 struct drm_lima_gem_submit_bo *bos; in lima_ioctl_gem_submit() local
111 bos = kvcalloc(args->nr_bos, sizeof(*submit.bos) + sizeof(*submit.lbos), GFP_KERNEL); in lima_ioctl_gem_submit()
112 if (!bos) in lima_ioctl_gem_submit()
115 size = args->nr_bos * sizeof(*submit.bos); in lima_ioctl_gem_submit()
116 if (copy_from_user(bos, u64_to_user_ptr(args->bos), size)) { in lima_ioctl_gem_submit()
144 submit.bos = bos; in lima_ioctl_gem_submit()
145 submit.lbos = (void *)bos + size; in lima_ioctl_gem_submit()
161 kvfree(bos); in lima_ioctl_gem_submit()
lima_sched.h
20 struct lima_bo **bos; member
77 struct lima_bo **bos, int num_bos,
lima_drv.h
29 struct drm_lima_gem_submit_bo *bos; member
/Linux-v5.4/drivers/gpu/drm/radeon/
radeon_mn.c
49 struct list_head bos; member
97 list_for_each_entry(bo, &node->bos, mn_list) { in radeon_mn_invalidate_range_start()
181 struct list_head bos; in radeon_mn_register() local
189 INIT_LIST_HEAD(&bos); in radeon_mn_register()
199 list_splice(&node->bos, &bos); in radeon_mn_register()
214 INIT_LIST_HEAD(&node->bos); in radeon_mn_register()
215 list_splice(&bos, &node->bos); in radeon_mn_register()
216 list_add(&bo->mn_list, &node->bos); in radeon_mn_register()
248 node = container_of(head, struct radeon_mn_node, bos); in radeon_mn_unregister()
/Linux-v5.4/drivers/gpu/drm/qxl/
qxl_release.c
140 INIT_LIST_HEAD(&release->bos); in qxl_release_alloc()
162 while (!list_empty(&release->bos)) { in qxl_release_free_list()
166 entry = container_of(release->bos.next, in qxl_release_free_list()
190 WARN_ON(list_empty(&release->bos)); in qxl_release_free()
213 list_for_each_entry(entry, &release->bos, tv.head) { in qxl_release_list_add()
225 list_add_tail(&entry->tv.head, &release->bos); in qxl_release_list_add()
259 if (list_is_singular(&release->bos)) in qxl_release_reserve_list()
262 ret = ttm_eu_reserve_buffers(&release->ticket, &release->bos, in qxl_release_reserve_list()
267 list_for_each_entry(entry, &release->bos, tv.head) { in qxl_release_reserve_list()
272 ttm_eu_backoff_reservation(&release->ticket, &release->bos); in qxl_release_reserve_list()
[all …]
/Linux-v5.4/drivers/gpu/drm/amd/amdgpu/
amdgpu_mn.c
63 struct list_head bos; member
85 list_for_each_entry_safe(bo, next_bo, &node->bos, mn_list) { in amdgpu_mn_destroy()
177 list_for_each_entry(bo, &node->bos, mn_list) { in amdgpu_mn_invalidate_node()
276 list_for_each_entry(bo, &node->bos, mn_list) { in amdgpu_mn_sync_pagetables_hsa()
383 struct list_head bos; in amdgpu_mn_register() local
394 INIT_LIST_HEAD(&bos); in amdgpu_mn_register()
404 list_splice(&node->bos, &bos); in amdgpu_mn_register()
416 INIT_LIST_HEAD(&node->bos); in amdgpu_mn_register()
417 list_splice(&bos, &node->bos); in amdgpu_mn_register()
418 list_add(&bo->mn_list, &node->bos); in amdgpu_mn_register()
[all …]
/Linux-v5.4/drivers/staging/wusbcore/
devconnect.c
690 struct usb_bos_descriptor *bos, size_t desc_size) in wusb_dev_bos_grok() argument
697 itr = (void *)bos + sizeof(*bos); in wusb_dev_bos_grok()
698 top = itr + desc_size - sizeof(*bos); in wusb_dev_bos_grok()
706 (int)(itr - (void *)bos), top - itr); in wusb_dev_bos_grok()
718 (int)(itr - (void *)bos), in wusb_dev_bos_grok()
736 cap_size, (int)(itr - (void *)bos)); in wusb_dev_bos_grok()
764 struct usb_bos_descriptor *bos; in wusb_dev_bos_add() local
767 bos = kmalloc(alloc_size, GFP_KERNEL); in wusb_dev_bos_add()
768 if (bos == NULL) in wusb_dev_bos_add()
770 result = usb_get_descriptor(usb_dev, USB_DT_BOS, 0, bos, desc_size); in wusb_dev_bos_add()
[all …]
/Linux-v5.4/drivers/usb/core/
config.c
906 if (dev->bos) { in usb_release_bos_descriptor()
907 kfree(dev->bos->desc); in usb_release_bos_descriptor()
908 kfree(dev->bos); in usb_release_bos_descriptor()
909 dev->bos = NULL; in usb_release_bos_descriptor()
926 struct usb_bos_descriptor *bos; in usb_get_bos_descriptor() local
934 bos = kzalloc(sizeof(struct usb_bos_descriptor), GFP_KERNEL); in usb_get_bos_descriptor()
935 if (!bos) in usb_get_bos_descriptor()
939 ret = usb_get_descriptor(dev, USB_DT_BOS, 0, bos, USB_DT_BOS_SIZE); in usb_get_bos_descriptor()
940 if (ret < USB_DT_BOS_SIZE || bos->bLength < USB_DT_BOS_SIZE) { in usb_get_bos_descriptor()
944 kfree(bos); in usb_get_bos_descriptor()
[all …]
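
In config.c, usb_get_bos_descriptor() first fetches the fixed 5-byte BOS header (USB_DT_BOS_SIZE) to learn wTotalLength and bNumDeviceCaps, then reads the full blob and walks the device capability descriptors that follow, much like the itr/top loop in wusb_dev_bos_grok() above. A minimal userland sketch of that walk over a raw descriptor blob follows; the helper name and the sample blob are illustrative, not kernel code.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define USB_DT_BOS                0x0f
#define USB_DT_DEVICE_CAPABILITY  0x10
#define USB_DT_BOS_SIZE           5

/* Walk the device capability descriptors that follow a BOS header,
 * mirroring the itr/top loop in wusb_dev_bos_grok(). Fields are read
 * byte by byte to stay independent of struct packing and endianness. */
static void walk_bos(const uint8_t *buf, size_t len)
{
	uint16_t wTotalLength;
	const uint8_t *itr, *top;

	if (len < USB_DT_BOS_SIZE || buf[1] != USB_DT_BOS ||
	    buf[0] < USB_DT_BOS_SIZE)
		return;

	wTotalLength = buf[2] | (buf[3] << 8);   /* little-endian on the wire */
	if (wTotalLength < len)
		len = wTotalLength;

	itr = buf + buf[0];          /* skip the BOS header itself */
	top = buf + len;
	while (itr + 3 <= top && itr[0] >= 3 && itr + itr[0] <= top) {
		if (itr[1] == USB_DT_DEVICE_CAPABILITY)
			printf("capability type 0x%02x, %u bytes\n",
			       (unsigned)itr[2], (unsigned)itr[0]);
		itr += itr[0];       /* bLength of this capability */
	}
}

int main(void)
{
	/* Sample blob: BOS header (wTotalLength = 12, one capability)
	 * followed by a USB 2.0 Extension capability advertising LPM. */
	const uint8_t blob[] = {
		0x05, USB_DT_BOS, 0x0c, 0x00, 0x01,
		0x07, USB_DT_DEVICE_CAPABILITY, 0x02, 0x02, 0x00, 0x00, 0x00,
	};

	walk_bos(blob, sizeof(blob));
	return 0;
}
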
/Linux-v5.4/drivers/gpu/drm/omapdrm/
omap_fb.c
312 struct drm_gem_object *bos[4]; in omap_framebuffer_create() local
317 bos[i] = drm_gem_object_lookup(file, mode_cmd->handles[i]); in omap_framebuffer_create()
318 if (!bos[i]) { in omap_framebuffer_create()
324 fb = omap_framebuffer_init(dev, mode_cmd, bos); in omap_framebuffer_create()
332 drm_gem_object_put_unlocked(bos[i]); in omap_framebuffer_create()
338 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos) in omap_framebuffer_init() argument
400 if (size > omap_gem_mmap_size(bos[i]) - mode_cmd->offsets[i]) { in omap_framebuffer_init()
403 bos[i]->size - mode_cmd->offsets[i], size); in omap_framebuffer_init()
408 fb->obj[i] = bos[i]; in omap_framebuffer_init()
omap_fb.h
25 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos);
/Linux-v5.4/drivers/gpu/drm/panfrost/
panfrost_job.c
190 static void panfrost_acquire_object_fences(struct drm_gem_object **bos, in panfrost_acquire_object_fences() argument
197 implicit_fences[i] = dma_resv_get_excl_rcu(bos[i]->resv); in panfrost_acquire_object_fences()
200 static void panfrost_attach_object_fences(struct drm_gem_object **bos, in panfrost_attach_object_fences() argument
207 dma_resv_add_excl_fence(bos[i]->resv, fence); in panfrost_attach_object_fences()
220 ret = drm_gem_lock_reservations(job->bos, job->bo_count, in panfrost_job_push()
237 panfrost_acquire_object_fences(job->bos, job->bo_count, in panfrost_job_push()
244 panfrost_attach_object_fences(job->bos, job->bo_count, in panfrost_job_push()
248 drm_gem_unlock_reservations(job->bos, job->bo_count, &acquire_ctx); in panfrost_job_push()
272 if (job->bos) { in panfrost_job_cleanup()
274 drm_gem_object_put_unlocked(job->bos[i]); in panfrost_job_cleanup()
[all …]
/Linux-v5.4/net/mpls/
internal.h
15 u8 bos; member
175 static inline struct mpls_shim_hdr mpls_entry_encode(u32 label, unsigned ttl, unsigned tc, bool bos) in mpls_entry_encode() argument
181 (bos ? (1 << MPLS_LS_S_SHIFT) : 0) | in mpls_entry_encode()
194 result.bos = (entry & MPLS_LS_S_MASK) >> MPLS_LS_S_SHIFT; in mpls_entry_decode()
mpls_iptunnel.c
50 bool bos; in mpls_xmit() local
126 bos = true; in mpls_xmit()
129 ttl, 0, bos); in mpls_xmit()
130 bos = false; in mpls_xmit()
af_mpls.c
193 if (!dec.bos) in mpls_multipath_hash()
432 if (unlikely(!new_header_size && dec.bos)) { in mpls_forward()
437 bool bos; in mpls_forward() local
443 bos = dec.bos; in mpls_forward()
446 dec.ttl, 0, bos); in mpls_forward()
447 bos = false; in mpls_forward()
1684 bool bos; in nla_put_labels() local
1691 bos = true; in nla_put_labels()
1693 nla_label[i] = mpls_entry_encode(label[i], 0, 0, bos); in nla_put_labels()
1694 bos = false; in nla_put_labels()
[all …]
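
In net/mpls, bos is the bottom-of-stack (S) bit of a 32-bit MPLS label stack entry: 20-bit label, 3-bit traffic class, the S bit and an 8-bit TTL. mpls_entry_encode()/mpls_entry_decode() pack and unpack that word, and mpls_xmit() and nla_put_labels() set bos only for the innermost label. A minimal userland sketch of the same RFC 3032 packing follows; the function and struct names are illustrative, and unlike the kernel helper it leaves the word in host byte order rather than converting to big-endian.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* RFC 3032 label stack entry bit layout (same shift values as the
 * kernel's MPLS_LS_* constants). */
#define MPLS_LS_LABEL_SHIFT 12
#define MPLS_LS_TC_SHIFT     9
#define MPLS_LS_S_SHIFT      8   /* bottom-of-stack bit */

struct mpls_entry { uint32_t label; uint8_t tc; bool bos; uint8_t ttl; };

static uint32_t mpls_encode(uint32_t label, uint8_t tc, bool bos, uint8_t ttl)
{
	return (label << MPLS_LS_LABEL_SHIFT) |
	       ((uint32_t)tc << MPLS_LS_TC_SHIFT) |
	       (bos ? 1u << MPLS_LS_S_SHIFT : 0) |
	       ttl;
}

static struct mpls_entry mpls_decode(uint32_t entry)
{
	struct mpls_entry e = {
		.label = entry >> MPLS_LS_LABEL_SHIFT,
		.tc    = (entry >> MPLS_LS_TC_SHIFT) & 0x7,
		.bos   = (entry >> MPLS_LS_S_SHIFT) & 0x1,
		.ttl   = entry & 0xff,
	};
	return e;
}

int main(void)
{
	/* Two-label stack: only the innermost entry carries the S bit,
	 * matching the bos = true / bos = false sequence in mpls_xmit(). */
	uint32_t outer = mpls_encode(100, 0, false, 64);
	uint32_t inner = mpls_encode(200, 0, true, 64);

	printf("outer bos=%d, inner bos=%d\n",
	       mpls_decode(outer).bos, mpls_decode(inner).bos);
	return 0;
}
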
/Linux-v5.4/drivers/gpu/drm/msm/adreno/
adreno_gpu.c
595 for (i = 0; state->bos && i < state->nr_bos; i++) in adreno_gpu_state_destroy()
596 kvfree(state->bos[i].data); in adreno_gpu_state_destroy()
598 kfree(state->bos); in adreno_gpu_state_destroy()
726 if (state->bos) { in adreno_show()
731 state->bos[i].iova); in adreno_show()
732 drm_printf(p, " size: %zd\n", state->bos[i].size); in adreno_show()
734 adreno_show_object(p, &state->bos[i].data, in adreno_show()
735 state->bos[i].size, &state->bos[i].encoded); in adreno_show()
/Linux-v5.4/drivers/usb/gadget/
composite.c
614 struct usb_bos_descriptor *bos = cdev->req->buf; in bos_desc()
617 bos->bLength = USB_DT_BOS_SIZE; in bos_desc()
618 bos->bDescriptorType = USB_DT_BOS; in bos_desc()
620 bos->wTotalLength = cpu_to_le16(USB_DT_BOS_SIZE); in bos_desc()
621 bos->bNumDeviceCaps = 0; in bos_desc()
650 usb_ext = cdev->req->buf + le16_to_cpu(bos->wTotalLength); in bos_desc()
651 bos->bNumDeviceCaps++; in bos_desc()
652 le16_add_cpu(&bos->wTotalLength, USB_DT_USB_EXT_CAP_SIZE); in bos_desc()
666 ss_cap = cdev->req->buf + le16_to_cpu(bos->wTotalLength); in bos_desc()
667 bos->bNumDeviceCaps++; in bos_desc()
[all …]
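
In composite.c, bos_desc() builds the BOS reply in place: it writes the 5-byte header with bNumDeviceCaps = 0, then appends each capability descriptor at req->buf + wTotalLength, bumping bNumDeviceCaps and adding the capability size to wTotalLength (le16_add_cpu). A minimal userland sketch of that append pattern follows; append_usb2_ext_cap() is a hypothetical helper for illustration, with the multi-byte fields kept little-endian as they are on the wire.

#include <stdint.h>
#include <stdio.h>

#define USB_DT_BOS                0x0f
#define USB_DT_DEVICE_CAPABILITY  0x10
#define USB_DT_BOS_SIZE           5
#define USB_DT_USB_EXT_CAP_SIZE   7
#define USB_CAP_TYPE_EXT          0x02
#define USB_LPM_SUPPORT           (1u << 1)

static void put_le16(uint8_t *p, uint16_t v) { p[0] = v & 0xff; p[1] = v >> 8; }
static uint16_t get_le16(const uint8_t *p)   { return p[0] | (p[1] << 8); }

/* Hypothetical helper: append a USB 2.0 Extension capability at
 * buf + wTotalLength and update the BOS header, the way bos_desc()
 * appends each capability to cdev->req->buf. */
static void append_usb2_ext_cap(uint8_t *buf)
{
	uint8_t *cap = buf + get_le16(buf + 2);   /* current wTotalLength */

	cap[0] = USB_DT_USB_EXT_CAP_SIZE;         /* bLength */
	cap[1] = USB_DT_DEVICE_CAPABILITY;        /* bDescriptorType */
	cap[2] = USB_CAP_TYPE_EXT;                /* bDevCapabilityType */
	cap[3] = USB_LPM_SUPPORT;                 /* bmAttributes (LSB) */
	cap[4] = cap[5] = cap[6] = 0;

	buf[4]++;                                 /* bNumDeviceCaps */
	put_le16(buf + 2, get_le16(buf + 2) + USB_DT_USB_EXT_CAP_SIZE);
}

int main(void)
{
	uint8_t buf[64] = { 0 };

	/* Fixed BOS header, mirroring the first lines of bos_desc(). */
	buf[0] = USB_DT_BOS_SIZE;            /* bLength */
	buf[1] = USB_DT_BOS;                 /* bDescriptorType */
	put_le16(buf + 2, USB_DT_BOS_SIZE);  /* wTotalLength so far */
	buf[4] = 0;                          /* bNumDeviceCaps */

	append_usb2_ext_cap(buf);

	printf("wTotalLength=%u, bNumDeviceCaps=%u\n",
	       (unsigned)get_le16(buf + 2), (unsigned)buf[4]);
	return 0;
}
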
/Linux-v5.4/drivers/gpu/drm/msm/disp/dpu1/
dpu_formats.h
49 struct drm_gem_object **bos);
