
Searched refs:gem (Results 1 – 25 of 97) sorted by relevance


/Linux-v6.1/drivers/gpu/drm/tegra/
gem.c
55 drm_gem_object_put(&obj->gem); in tegra_bo_put()
62 struct drm_gem_object *gem = &obj->gem; in tegra_bo_pin() local
78 if (gem->import_attach) { in tegra_bo_pin()
79 struct dma_buf *buf = gem->import_attach->dmabuf; in tegra_bo_pin()
96 map->size = gem->size; in tegra_bo_pin()
116 err = sg_alloc_table_from_pages(map->sgt, obj->pages, obj->num_pages, 0, gem->size, in tegra_bo_pin()
126 err = dma_get_sgtable(dev, map->sgt, obj->vaddr, obj->iova, gem->size); in tegra_bo_pin()
148 map->size = gem->size; in tegra_bo_pin()
183 } else if (obj->gem.import_attach) { in tegra_bo_mmap()
184 ret = dma_buf_vmap(obj->gem.import_attach->dmabuf, &map); in tegra_bo_mmap()
[all …]
gem.h
36 struct drm_gem_object gem; member
52 static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem) in to_tegra_bo() argument
54 return container_of(gem, struct tegra_bo, gem); in to_tegra_bo()
69 void tegra_bo_free_object(struct drm_gem_object *gem);
75 int __tegra_gem_mmap(struct drm_gem_object *gem, struct vm_area_struct *vma);
78 struct dma_buf *tegra_gem_prime_export(struct drm_gem_object *gem,
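The to_tegra_bo() hit above is the standard DRM idiom: the driver embeds struct drm_gem_object in its own buffer-object type and recovers the wrapper with container_of(). A minimal sketch of that idiom, using hypothetical my_bo/to_my_bo names in place of the Tegra types:

#include <linux/container_of.h>
#include <drm/drm_gem.h>

/* Hypothetical driver BO embedding the core GEM object (cf. struct tegra_bo). */
struct my_bo {
        struct drm_gem_object gem;      /* embedded, not a pointer */
        void *vaddr;                    /* driver-private state follows */
};

/* Map the embedded GEM object back to its wrapper, as to_tegra_bo() does. */
static inline struct my_bo *to_my_bo(struct drm_gem_object *gem)
{
        return container_of(gem, struct my_bo, gem);
}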
fb.c
127 fb->obj[i] = &planes[i]->gem; in tegra_fb_alloc()
146 struct drm_gem_object *gem; in tegra_fb_create() local
156 gem = drm_gem_object_lookup(file, cmd->handles[i]); in tegra_fb_create()
157 if (!gem) { in tegra_fb_create()
167 if (gem->size < size) { in tegra_fb_create()
172 planes[i] = to_tegra_bo(gem); in tegra_fb_create()
185 drm_gem_object_put(&planes[i]->gem); in tegra_fb_create()
199 err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma); in tegra_fb_mmap()
203 return __tegra_gem_mmap(&bo->gem, vma); in tegra_fb_mmap()
249 drm_gem_object_put(&bo->gem); in tegra_fbdev_probe()
[all …]
drm.c
254 refs[num_refs++] = &obj->gem; in tegra_drm_submit()
261 if (offset & 3 || offset > obj->gem.size) { in tegra_drm_submit()
284 refs[num_refs++] = &obj->gem; in tegra_drm_submit()
292 reloc->cmdbuf.offset >= obj->gem.size) { in tegra_drm_submit()
298 refs[num_refs++] = &obj->gem; in tegra_drm_submit()
300 if (reloc->target.offset >= obj->gem.size) { in tegra_drm_submit()
370 struct drm_gem_object *gem; in tegra_gem_mmap() local
373 gem = drm_gem_object_lookup(file, args->handle); in tegra_gem_mmap()
374 if (!gem) in tegra_gem_mmap()
377 bo = to_tegra_bo(gem); in tegra_gem_mmap()
[all …]
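The tegra_gem_mmap() hits show the usual handle-to-object flow: drm_gem_object_lookup() resolves a userspace handle and takes a reference, the driver downcasts with to_tegra_bo(), and drm_gem_object_put() drops the reference when done. A hedged sketch of that flow (my_gem_mmap_offset() is a hypothetical helper, not a Tegra function):

#include <linux/errno.h>
#include <drm/drm_file.h>
#include <drm/drm_gem.h>

/* Resolve a GEM handle to its fake mmap offset and drop the lookup
 * reference before returning, mirroring the pattern in tegra_gem_mmap(). */
static int my_gem_mmap_offset(struct drm_file *file, u32 handle, u64 *offset)
{
        struct drm_gem_object *gem;

        gem = drm_gem_object_lookup(file, handle);
        if (!gem)
                return -ENOENT;

        *offset = drm_vma_node_offset_addr(&gem->vma_node);

        drm_gem_object_put(gem);
        return 0;
}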
/Linux-v6.1/drivers/gpu/drm/
drm_gem_ttm_helper.c
24 const struct drm_gem_object *gem) in drm_gem_ttm_print_info() argument
40 const struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem); in drm_gem_ttm_print_info()
63 int drm_gem_ttm_vmap(struct drm_gem_object *gem, in drm_gem_ttm_vmap() argument
66 struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem); in drm_gem_ttm_vmap()
69 dma_resv_lock(gem->resv, NULL); in drm_gem_ttm_vmap()
71 dma_resv_unlock(gem->resv); in drm_gem_ttm_vmap()
85 void drm_gem_ttm_vunmap(struct drm_gem_object *gem, in drm_gem_ttm_vunmap() argument
88 struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem); in drm_gem_ttm_vunmap()
90 dma_resv_lock(gem->resv, NULL); in drm_gem_ttm_vunmap()
92 dma_resv_unlock(gem->resv); in drm_gem_ttm_vunmap()
[all …]
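drm_gem_ttm_vmap() and drm_gem_ttm_vunmap() above take the object's dma_resv lock around the underlying TTM map and unmap. Drivers whose GEM objects are TTM buffer objects typically wire these helpers straight into their drm_gem_object_funcs, as sketched below (my_gem_free() is a hypothetical placeholder for the driver's own free callback):

#include <drm/drm_gem.h>
#include <drm/drm_gem_ttm_helper.h>

static void my_gem_free(struct drm_gem_object *gem)
{
        /* driver-specific teardown of the TTM-backed object would go here */
}

/* GEM object callbacks backed by the TTM helpers from the hits above. */
static const struct drm_gem_object_funcs my_gem_object_funcs = {
        .free           = my_gem_free,
        .print_info     = drm_gem_ttm_print_info,
        .vmap           = drm_gem_ttm_vmap,
        .vunmap         = drm_gem_ttm_vunmap,
        .mmap           = drm_gem_ttm_mmap,
};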
drm_gem_vram_helper.c
191 struct drm_gem_object *gem; in drm_gem_vram_create() local
200 gem = dev->driver->gem_create_object(dev, size); in drm_gem_vram_create()
201 if (IS_ERR(gem)) in drm_gem_vram_create()
202 return ERR_CAST(gem); in drm_gem_vram_create()
203 gbo = drm_gem_vram_of_gem(gem); in drm_gem_vram_create()
208 gem = &gbo->bo.base; in drm_gem_vram_create()
211 if (!gem->funcs) in drm_gem_vram_create()
212 gem->funcs = &drm_gem_vram_object_funcs; in drm_gem_vram_create()
214 ret = drm_gem_object_init(dev, gem, size); in drm_gem_vram_create()
593 static void drm_gem_vram_object_free(struct drm_gem_object *gem) in drm_gem_vram_object_free() argument
[all …]
drm_fb_dma_helper.c
49 struct drm_gem_object *gem; in drm_fb_dma_get_gem_obj() local
51 gem = drm_gem_fb_get_obj(fb, plane); in drm_fb_dma_get_gem_obj()
52 if (!gem) in drm_fb_dma_get_gem_obj()
55 return to_drm_gem_dma_obj(gem); in drm_fb_dma_get_gem_obj()
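drm_fb_dma_get_gem_obj() returns the DMA GEM object backing one plane of a framebuffer; the rcar-du, shmobile, aspeed, and fsl-dcu hits further down all use it to compute the address a plane scans out from. A rough sketch of that computation for a linear, single-plane format (my_plane_scanout_addr() is a hypothetical helper):

#include <drm/drm_fb_dma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_dma_helper.h>

/* DMA address for plane 0 at pixel (x, y), along the lines of
 * shmob_drm_plane_compute_base() below; assumes a linear single-plane format. */
static dma_addr_t my_plane_scanout_addr(struct drm_framebuffer *fb,
                                        unsigned int x, unsigned int y)
{
        struct drm_gem_dma_object *gem = drm_fb_dma_get_gem_obj(fb, 0);

        if (!gem)
                return 0;

        return gem->dma_addr + fb->offsets[0] +
               y * fb->pitches[0] + x * fb->format->cpp[0];
}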
drm_client.c
238 drm_gem_vunmap(buffer->gem, &buffer->map); in drm_client_buffer_delete()
240 if (buffer->gem) in drm_client_buffer_delete()
241 drm_gem_object_put(buffer->gem); in drm_client_buffer_delete()
281 buffer->gem = obj; in drm_client_buffer_create()
326 ret = drm_gem_vmap(buffer->gem, map); in drm_client_buffer_vmap()
348 drm_gem_vunmap(buffer->gem, map); in drm_client_buffer_vunmap()
/Linux-v6.1/drivers/gpu/drm/i915/
Makefile
144 gem-y += \
145 gem/i915_gem_busy.o \
146 gem/i915_gem_clflush.o \
147 gem/i915_gem_context.o \
148 gem/i915_gem_create.o \
149 gem/i915_gem_dmabuf.o \
150 gem/i915_gem_domain.o \
151 gem/i915_gem_execbuffer.o \
152 gem/i915_gem_internal.o \
153 gem/i915_gem_object.o \
[all …]
i915_sysfs.c
80 spin_lock(&i915->gem.contexts.lock); in i915_l3_read()
85 spin_unlock(&i915->gem.contexts.lock); in i915_l3_read()
113 spin_lock(&i915->gem.contexts.lock); in i915_l3_write()
126 list_for_each_entry(ctx, &i915->gem.contexts.list, link) in i915_l3_write()
129 spin_unlock(&i915->gem.contexts.lock); in i915_l3_write()
/Linux-v6.1/drivers/gpu/drm/nouveau/
nouveau_gem.c
77 nouveau_gem_object_del(struct drm_gem_object *gem) in nouveau_gem_object_del() argument
79 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_object_del()
90 if (gem->import_attach) in nouveau_gem_object_del()
91 drm_prime_gem_destroy(gem, nvbo->bo.sg); in nouveau_gem_object_del()
100 nouveau_gem_object_open(struct drm_gem_object *gem, struct drm_file *file_priv) in nouveau_gem_object_open() argument
103 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_object_open()
177 nouveau_gem_object_close(struct drm_gem_object *gem, struct drm_file *file_priv) in nouveau_gem_object_close() argument
180 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_object_close()
267 nouveau_gem_info(struct drm_file *file_priv, struct drm_gem_object *gem, in nouveau_gem_info() argument
271 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_info()
[all …]
nouveau_gem.h
11 nouveau_gem_object(struct drm_gem_object *gem) in nouveau_gem_object() argument
13 return gem ? container_of(gem, struct nouveau_bo, bo.base) : NULL; in nouveau_gem_object()
nouveau_ttm.c
192 drm->gem.vram_available >> PAGE_SHIFT); in nouveau_ttm_init_vram()
198 drm->gem.vram_available >> PAGE_SHIFT); in nouveau_ttm_init_vram()
221 unsigned long size_pages = drm->gem.gart_available >> PAGE_SHIFT; in nouveau_ttm_init_gtt()
317 drm->gem.vram_available = drm->client.device.info.ram_user; in nouveau_ttm_init()
333 drm->gem.gart_available = drm->client.vmm.vmm.limit; in nouveau_ttm_init()
335 drm->gem.gart_available = drm->agp.size; in nouveau_ttm_init()
347 NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20)); in nouveau_ttm_init()
348 NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20)); in nouveau_ttm_init()
nouveau_display.c
302 struct drm_gem_object *gem, in nouveau_framebuffer_new() argument
306 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_framebuffer_new()
369 fb->obj[0] = gem; in nouveau_framebuffer_new()
383 struct drm_gem_object *gem; in nouveau_user_framebuffer_create() local
386 gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]); in nouveau_user_framebuffer_create()
387 if (!gem) in nouveau_user_framebuffer_create()
390 ret = nouveau_framebuffer_new(dev, mode_cmd, gem, &fb); in nouveau_user_framebuffer_create()
394 drm_gem_object_put(gem); in nouveau_user_framebuffer_create()
/Linux-v6.1/drivers/net/ethernet/sun/
sungem.c
116 static u16 __sungem_phy_read(struct gem *gp, int phy_addr, int reg) in __sungem_phy_read()
144 struct gem *gp = netdev_priv(dev); in _sungem_phy_read()
148 static inline u16 sungem_phy_read(struct gem *gp, int reg) in sungem_phy_read()
153 static void __sungem_phy_write(struct gem *gp, int phy_addr, int reg, u16 val) in __sungem_phy_write()
177 struct gem *gp = netdev_priv(dev); in _sungem_phy_write()
181 static inline void sungem_phy_write(struct gem *gp, int reg, u16 val) in sungem_phy_write()
186 static inline void gem_enable_ints(struct gem *gp) in gem_enable_ints()
192 static inline void gem_disable_ints(struct gem *gp) in gem_disable_ints()
199 static void gem_get_cell(struct gem *gp) in gem_get_cell()
213 static void gem_put_cell(struct gem *gp) in gem_put_cell()
[all …]
/Linux-v6.1/include/drm/
drm_gem_ttm_helper.h
19 const struct drm_gem_object *gem);
20 int drm_gem_ttm_vmap(struct drm_gem_object *gem,
22 void drm_gem_ttm_vunmap(struct drm_gem_object *gem,
24 int drm_gem_ttm_mmap(struct drm_gem_object *gem,
drm_gem_vram_helper.h
88 struct drm_gem_object *gem) in drm_gem_vram_of_gem() argument
90 return container_of(gem, struct drm_gem_vram_object, bo.base); in drm_gem_vram_of_gem()
/Linux-v6.1/drivers/gpu/drm/qxl/
qxl_gem.c
68 mutex_lock(&qdev->gem.mutex); in qxl_gem_object_create()
69 list_add_tail(&qbo->list, &qdev->gem.objects); in qxl_gem_object_create()
70 mutex_unlock(&qdev->gem.mutex); in qxl_gem_object_create()
116 INIT_LIST_HEAD(&qdev->gem.objects); in qxl_gem_init()
qxl_object.c
45 mutex_lock(&qdev->gem.mutex); in qxl_ttm_bo_destroy()
47 mutex_unlock(&qdev->gem.mutex); in qxl_ttm_bo_destroy()
358 if (list_empty(&qdev->gem.objects)) in qxl_bo_force_delete()
361 list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) { in qxl_bo_force_delete()
365 mutex_lock(&qdev->gem.mutex); in qxl_bo_force_delete()
367 mutex_unlock(&qdev->gem.mutex); in qxl_bo_force_delete()
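The qxl hits (and the radeon_prime.c hit further down) share a bookkeeping pattern: a per-device list of GEM-backed BOs guarded by a dedicated gem mutex, appended to at object creation and walked or pruned at teardown. A schematic version of that pattern, with hypothetical my_drm/my_obj types:

#include <linux/list.h>
#include <linux/mutex.h>

/* Hypothetical per-device bookkeeping, mirroring qdev->gem / rdev->gem. */
struct my_drm {
        struct mutex gem_mutex;
        struct list_head gem_objects;
};

struct my_obj {
        struct list_head list;
};

static void my_drm_gem_init(struct my_drm *mdrm)
{
        mutex_init(&mdrm->gem_mutex);
        INIT_LIST_HEAD(&mdrm->gem_objects);
}

/* Track a newly created object under the mutex, as qxl_gem_object_create()
 * and radeon_gem_prime_import_sg_table() do. */
static void my_drm_track_obj(struct my_drm *mdrm, struct my_obj *obj)
{
        mutex_lock(&mdrm->gem_mutex);
        list_add_tail(&obj->list, &mdrm->gem_objects);
        mutex_unlock(&mdrm->gem_mutex);
}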
/Linux-v6.1/drivers/gpu/drm/i915/pxp/
intel_pxp.c
275 spin_lock_irq(&i915->gem.contexts.lock); in intel_pxp_invalidate()
276 list_for_each_entry_safe(ctx, cn, &i915->gem.contexts.list, link) { in intel_pxp_invalidate()
288 spin_unlock_irq(&i915->gem.contexts.lock); in intel_pxp_invalidate()
314 spin_lock_irq(&i915->gem.contexts.lock); in intel_pxp_invalidate()
318 spin_unlock_irq(&i915->gem.contexts.lock); in intel_pxp_invalidate()
/Linux-v6.1/drivers/gpu/drm/radeon/
radeon_prime.c
61 mutex_lock(&rdev->gem.mutex); in radeon_gem_prime_import_sg_table()
62 list_add_tail(&bo->list, &rdev->gem.objects); in radeon_gem_prime_import_sg_table()
63 mutex_unlock(&rdev->gem.mutex); in radeon_gem_prime_import_sg_table()
/Linux-v6.1/drivers/gpu/drm/rcar-du/
rcar_du_vsp.c
205 struct drm_gem_dma_object *gem = drm_fb_dma_get_gem_obj(fb, i); in rcar_du_vsp_map_fb() local
208 if (gem->sgt) { in rcar_du_vsp_map_fb()
219 ret = sg_alloc_table(sgt, gem->sgt->orig_nents, in rcar_du_vsp_map_fb()
224 src = gem->sgt->sgl; in rcar_du_vsp_map_fb()
226 for (j = 0; j < gem->sgt->orig_nents; ++j) { in rcar_du_vsp_map_fb()
233 ret = dma_get_sgtable(rcdu->dev, sgt, gem->vaddr, in rcar_du_vsp_map_fb()
234 gem->dma_addr, gem->base.size); in rcar_du_vsp_map_fb()
/Linux-v6.1/drivers/gpu/drm/shmobile/
shmob_drm_plane.c
44 struct drm_gem_dma_object *gem; in shmob_drm_plane_compute_base() local
48 gem = drm_fb_dma_get_gem_obj(fb, 0); in shmob_drm_plane_compute_base()
49 splane->dma[0] = gem->dma_addr + fb->offsets[0] in shmob_drm_plane_compute_base()
54 gem = drm_fb_dma_get_gem_obj(fb, 1); in shmob_drm_plane_compute_base()
55 splane->dma[1] = gem->dma_addr + fb->offsets[1] in shmob_drm_plane_compute_base()
/Linux-v6.1/drivers/gpu/drm/aspeed/
aspeed_gfx_crtc.c
171 struct drm_gem_dma_object *gem; in aspeed_gfx_pipe_update() local
188 gem = drm_fb_dma_get_gem_obj(fb, 0); in aspeed_gfx_pipe_update()
189 if (!gem) in aspeed_gfx_pipe_update()
191 writel(gem->dma_addr, priv->base + CRT_ADDR); in aspeed_gfx_pipe_update()
/Linux-v6.1/drivers/gpu/drm/fsl-dcu/
fsl_dcu_drm_plane.c
87 struct drm_gem_dma_object *gem; in fsl_dcu_drm_plane_atomic_update() local
98 gem = drm_fb_dma_get_gem_obj(fb, 0); in fsl_dcu_drm_plane_atomic_update()
139 DCU_CTRLDESCLN(index, 3), gem->dma_addr); in fsl_dcu_drm_plane_atomic_update()
