/Linux-v4.19/drivers/gpu/drm/tegra/

gem.c
    27  drm_gem_object_put_unlocked(&obj->gem);  in tegra_bo_put()
    49  else if (obj->gem.import_attach)  in tegra_bo_mmap()
    50  return dma_buf_vmap(obj->gem.import_attach->dmabuf);  in tegra_bo_mmap()
    62  else if (obj->gem.import_attach)  in tegra_bo_munmap()
    63  dma_buf_vunmap(obj->gem.import_attach->dmabuf, addr);  in tegra_bo_munmap()
    74  else if (obj->gem.import_attach)  in tegra_bo_kmap()
    75  return dma_buf_kmap(obj->gem.import_attach->dmabuf, page);  in tegra_bo_kmap()
    88  else if (obj->gem.import_attach)  in tegra_bo_kunmap()
    89  dma_buf_kunmap(obj->gem.import_attach->dmabuf, page, addr);  in tegra_bo_kunmap()
    98  drm_gem_object_get(&obj->gem);  in tegra_bo_get()
    [all …]

gem.h
    34  struct drm_gem_object gem;  member
    50  static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem)  in to_tegra_bo() argument
    52  return container_of(gem, struct tegra_bo, gem);  in to_tegra_bo()
    67  void tegra_bo_free_object(struct drm_gem_object *gem);
    73  int __tegra_gem_mmap(struct drm_gem_object *gem, struct vm_area_struct *vma);
    77  struct drm_gem_object *gem,

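The gem.h hits show the standard GEM embedding idiom: the driver-private buffer object embeds a struct drm_gem_object and recovers its own type with container_of(). A minimal sketch of that pattern, with illustrative names (struct demo_bo and to_demo_bo() are not Tegra's):

#include <linux/kernel.h>
#include <drm/drm_gem.h>

/* Driver-private buffer object wrapping the core GEM object. */
struct demo_bo {
	struct drm_gem_object gem;	/* embedded, never a pointer */
	void *vaddr;			/* example of driver-specific state */
};

/* Downcast from the embedded member that DRM core hands back to the driver. */
static inline struct demo_bo *to_demo_bo(struct drm_gem_object *gem)
{
	return container_of(gem, struct demo_bo, gem);
}

nouveau_gem.h, hibmc_drm_drv.h and bochs.h further down use exactly the same idiom with their own wrapper types.
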
drm.c
    261  struct drm_gem_object *gem;  in host1x_bo_lookup() local
    264  gem = drm_gem_object_lookup(file, handle);  in host1x_bo_lookup()
    265  if (!gem)  in host1x_bo_lookup()
    268  bo = to_tegra_bo(gem);  in host1x_bo_lookup()
    394  refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    401  if (offset & 3 || offset > obj->gem.size) {  in tegra_drm_submit()
    424  refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    432  reloc->cmdbuf.offset >= obj->gem.size) {  in tegra_drm_submit()
    438  refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    440  if (reloc->target.offset >= obj->gem.size) {  in tegra_drm_submit()
    [all …]

fb.c
    116  fb->obj[i] = &planes[i]->gem;  in tegra_fb_alloc()
    135  struct drm_gem_object *gem;  in tegra_fb_create() local
    147  gem = drm_gem_object_lookup(file, cmd->handles[i]);  in tegra_fb_create()
    148  if (!gem) {  in tegra_fb_create()
    158  if (gem->size < size) {  in tegra_fb_create()
    163  planes[i] = to_tegra_bo(gem);  in tegra_fb_create()
    176  drm_gem_object_put_unlocked(&planes[i]->gem);  in tegra_fb_create()
    190  err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);  in tegra_fb_mmap()
    194  return __tegra_gem_mmap(&bo->gem, vma);  in tegra_fb_mmap()
    240  drm_gem_object_put_unlocked(&bo->gem);  in tegra_fbdev_probe()
    [all …]

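tegra_fb_create() follows the usual .fb_create pattern: look up each plane's GEM handle, check that the object is large enough for the requested pitch and offset, and drop the looked-up references on failure (mtk_drm_mode_fb_create() in the next section does the same for a single plane). A hedged sketch of the size check only, assuming one plane and a caller that supplies the bytes-per-pixel; demo_plane_fits() is an illustrative helper, not the driver's:

#include <linux/types.h>
#include <drm/drm_gem.h>

/*
 * The last scanline starts at (height - 1) * pitch; one more line of
 * width * cpp bytes plus the plane offset must still fit in the object.
 * Assumes height >= 1, which the addfb ioctl has already validated.
 */
static bool demo_plane_fits(struct drm_gem_object *gem,
			    u32 width, u32 height,
			    u32 pitch, u32 offset, u32 cpp)
{
	u64 min_size = (u64)(height - 1) * pitch + (u64)width * cpp + offset;

	return gem->size >= min_size;
}

When the check fails, the drivers drop the references taken with drm_gem_object_lookup() and fail the framebuffer creation.
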
/Linux-v4.19/drivers/gpu/drm/mediatek/

mtk_drm_fb.c
    66  struct drm_gem_object *gem;  in mtk_fb_wait() local
    73  gem = fb->obj[0];  in mtk_fb_wait()
    74  if (!gem || !gem->dma_buf || !gem->dma_buf->resv)  in mtk_fb_wait()
    77  resv = gem->dma_buf->resv;  in mtk_fb_wait()
    92  struct drm_gem_object *gem;  in mtk_drm_mode_fb_create() local
    101  gem = drm_gem_object_lookup(file, cmd->handles[0]);  in mtk_drm_mode_fb_create()
    102  if (!gem)  in mtk_drm_mode_fb_create()
    109  if (gem->size < size) {  in mtk_drm_mode_fb_create()
    114  fb = mtk_drm_framebuffer_init(dev, cmd, gem);  in mtk_drm_mode_fb_create()
    123  drm_gem_object_put_unlocked(gem);  in mtk_drm_mode_fb_create()

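mtk_fb_wait() waits for outstanding fences on the dma-buf backing the framebuffer, using the reservation object attached to it, before the buffer is used for scanout. A sketch of that wait under the v4.19 reservation API, assuming plane 0 may or may not have been imported; demo_fb_wait_idle() is an illustrative name, not the driver's:

#include <linux/dma-buf.h>
#include <linux/errno.h>
#include <linux/reservation.h>
#include <linux/sched.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem.h>

/* Block until the fences on the framebuffer's backing dma-buf have signalled. */
static int demo_fb_wait_idle(struct drm_framebuffer *fb)
{
	struct drm_gem_object *gem = fb->obj[0];
	long ret;

	/* Locally allocated buffers carry no dma-buf: nothing to wait for. */
	if (!gem || !gem->dma_buf || !gem->dma_buf->resv)
		return 0;

	ret = reservation_object_wait_timeout_rcu(gem->dma_buf->resv,
						  false,	/* exclusive fence only */
						  true,		/* interruptible */
						  MAX_SCHEDULE_TIMEOUT);
	if (ret < 0)
		return ret;

	return ret ? 0 : -ETIMEDOUT;
}
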
mtk_drm_plane.c
    117  struct drm_gem_object *gem;  in mtk_plane_atomic_update() local
    125  gem = fb->obj[0];  in mtk_plane_atomic_update()
    126  mtk_gem = to_mtk_gem_obj(gem);  in mtk_plane_atomic_update()

/Linux-v4.19/drivers/gpu/drm/vkms/

vkms_gem.c
    24  ret = drm_gem_object_init(dev, &obj->gem, size);  in __vkms_gem_create()
    37  struct vkms_gem_object *gem = container_of(obj, struct vkms_gem_object,  in vkms_gem_free_object() local
    38  gem);
    40  kvfree(gem->pages);  in vkms_gem_free_object()
    41  mutex_destroy(&gem->pages_lock);  in vkms_gem_free_object()
    43  kfree(gem);  in vkms_gem_free_object()
    56  num_pages = DIV_ROUND_UP(obj->gem.size, PAGE_SIZE);  in vkms_gem_fault()
    73  mapping = file_inode(obj->gem.filp)->i_mapping;  in vkms_gem_fault()
    117  ret = drm_gem_handle_create(file, &obj->gem, handle);  in vkms_gem_create()
    118  drm_gem_object_put_unlocked(&obj->gem);  in vkms_gem_create()
    [all …]

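vkms_gem_create() is close to the minimal lifecycle for a shmem-backed GEM object: initialise it against the device, create a userspace handle, then drop the local reference because the handle now holds one. A sketch of that flow with illustrative names (struct demo_gem_obj, demo_gem_create()); error handling is kept to the essentials:

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <drm/drm_gem.h>

struct demo_gem_obj {
	struct drm_gem_object gem;
};

/* Allocate a GEM object of @size bytes and return a handle usable by @file. */
static int demo_gem_create(struct drm_device *dev, struct drm_file *file,
			   size_t size, u32 *handle)
{
	struct demo_gem_obj *obj;
	int ret;

	obj = kzalloc(sizeof(*obj), GFP_KERNEL);
	if (!obj)
		return -ENOMEM;

	/* drm_gem_object_init() insists on a page-aligned size. */
	ret = drm_gem_object_init(dev, &obj->gem, round_up(size, PAGE_SIZE));
	if (ret) {
		kfree(obj);
		return ret;
	}

	ret = drm_gem_handle_create(file, &obj->gem, handle);
	/* The handle owns a reference now; drop the allocation reference. */
	drm_gem_object_put_unlocked(&obj->gem);

	return ret;
}

The unconditional put right after drm_gem_handle_create() is the key detail: on success the handle keeps the object alive, and on failure dropping the last reference frees it through the driver's free callback.
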
/Linux-v4.19/drivers/gpu/drm/nouveau/

nouveau_gem.c
    40  nouveau_gem_object_del(struct drm_gem_object *gem)  in nouveau_gem_object_del() argument
    42  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_del()
    52  if (gem->import_attach)  in nouveau_gem_object_del()
    53  drm_prime_gem_destroy(gem, nvbo->bo.sg);  in nouveau_gem_object_del()
    55  drm_gem_object_release(gem);  in nouveau_gem_object_del()
    58  gem->filp = NULL;  in nouveau_gem_object_del()
    66  nouveau_gem_object_open(struct drm_gem_object *gem, struct drm_file *file_priv)  in nouveau_gem_object_open() argument
    69  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_open()
    140  nouveau_gem_object_close(struct drm_gem_object *gem, struct drm_file *file_priv)  in nouveau_gem_object_close() argument
    143  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_close()
    [all …]

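nouveau_gem_object_del() shows the teardown split between the two kinds of objects: buffers imported through PRIME need drm_prime_gem_destroy() to detach from the exporting dma-buf and free the imported sg table, while drm_gem_object_release() covers the common GEM state. A hedged sketch of such a free callback (struct demo_prime_bo and demo_gem_free_object() are illustrative; nouveau additionally tears down TTM state not shown here):

#include <linux/kernel.h>
#include <linux/slab.h>
#include <drm/drm_gem.h>
#include <drm/drm_prime.h>

struct demo_prime_bo {
	struct drm_gem_object gem;
	struct sg_table *sgt;	/* kept around only for imported buffers */
};

/* Sketch of a .gem_free_object_unlocked driver callback. */
static void demo_gem_free_object(struct drm_gem_object *gem)
{
	struct demo_prime_bo *bo = container_of(gem, struct demo_prime_bo, gem);

	if (gem->import_attach)
		/* Detach from the exporter; also frees the imported sg table. */
		drm_prime_gem_destroy(gem, bo->sgt);

	drm_gem_object_release(gem);
	kfree(bo);
}
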
nouveau_gem.h
    11  nouveau_gem_object(struct drm_gem_object *gem)  in nouveau_gem_object() argument
    13  return gem ? container_of(gem, struct nouveau_bo, gem) : NULL;  in nouveau_gem_object()

nouveau_ttm.c
    315  drm->gem.vram_available = drm->client.device.info.ram_user;  in nouveau_ttm_init()
    321  drm->gem.vram_available >> PAGE_SHIFT);  in nouveau_ttm_init()
    332  drm->gem.gart_available = drm->client.vmm.vmm.limit;  in nouveau_ttm_init()
    334  drm->gem.gart_available = drm->agp.size;  in nouveau_ttm_init()
    338  drm->gem.gart_available >> PAGE_SHIFT);  in nouveau_ttm_init()
    344  NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20));  in nouveau_ttm_init()
    345  NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20));  in nouveau_ttm_init()

nouveau_display.c
    208  drm_gem_object_put_unlocked(&fb->nvbo->gem);  in nouveau_user_framebuffer_destroy()
    221  return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);  in nouveau_user_framebuffer_create_handle()
    278  struct drm_gem_object *gem;  in nouveau_user_framebuffer_create() local
    281  gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);  in nouveau_user_framebuffer_create()
    282  if (!gem)  in nouveau_user_framebuffer_create()
    284  nvbo = nouveau_gem_object(gem);  in nouveau_user_framebuffer_create()
    290  drm_gem_object_put_unlocked(gem);  in nouveau_user_framebuffer_create()
    967  ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);  in nouveau_display_dumb_create()
    968  drm_gem_object_put_unlocked(&bo->gem);  in nouveau_display_dumb_create()
    977  struct drm_gem_object *gem;  in nouveau_display_dumb_map_offset() local
    [all …]

/Linux-v4.19/drivers/gpu/drm/gma500/

gem.c
    34  struct gtt_range *gtt = container_of(obj, struct gtt_range, gem);  in psb_gem_free_object()
    78  if (drm_gem_object_init(dev, &r->gem, size) != 0) {  in psb_gem_create()
    85  mapping_set_gfp_mask(r->gem.filp->f_mapping, GFP_KERNEL | __GFP_DMA32);  in psb_gem_create()
    87  ret = drm_gem_handle_create(file, &r->gem, &handle);  in psb_gem_create()
    90  &r->gem, size);  in psb_gem_create()
    91  drm_gem_object_release(&r->gem);  in psb_gem_create()
    96  drm_gem_object_put_unlocked(&r->gem);  in psb_gem_create()
    153  r = container_of(obj, struct gtt_range, gem); /* Get the gtt range */  in psb_gem_fault()

gtt.h
    47  struct drm_gem_object gem; /* GEM high level stuff */  member
    56  #define to_gtt_range(x) container_of(x, struct gtt_range, gem)

gtt.c
    211  pages = drm_gem_get_pages(&gt->gem);  in psb_gtt_attach_pages()
    215  gt->npage = gt->gem.size / PAGE_SIZE;  in psb_gtt_attach_pages()
    232  drm_gem_put_pages(&gt->gem, gt->pages, true, false);  in psb_gtt_detach_pages()
    249  struct drm_device *dev = gt->gem.dev;  in psb_gtt_pin()
    287  struct drm_device *dev = gt->gem.dev;  in psb_gtt_unpin()
    362  gt->gem.dev = dev;  in psb_gtt_alloc_range()

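psb_gtt_attach_pages() and psb_gtt_detach_pages() use the core helpers for pinning the shmem pages backing a GEM object: drm_gem_get_pages() returns one struct page pointer per PAGE_SIZE of the object, and drm_gem_put_pages() releases them, optionally marking them dirty. A sketch of that pair with illustrative helper names (demo_attach_pages, demo_detach_pages):

#include <linux/err.h>
#include <linux/mm.h>
#include <drm/drm_gem.h>

/* Pin the object's shmem pages; returns the page array or an ERR_PTR. */
static struct page **demo_attach_pages(struct drm_gem_object *gem,
				       unsigned int *npages)
{
	struct page **pages = drm_gem_get_pages(gem);

	if (IS_ERR(pages))
		return pages;

	/* GEM backing store is allocated in whole pages, so this divides evenly. */
	*npages = gem->size / PAGE_SIZE;
	return pages;
}

static void demo_detach_pages(struct drm_gem_object *gem, struct page **pages)
{
	/* dirty = true: the pages may have been written through a GTT mapping. */
	drm_gem_put_pages(gem, pages, true, false);
}
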
/Linux-v4.19/Documentation/devicetree/bindings/net/

macb.txt
     4  - compatible: Should be "cdns,[<chip>-]{macb|gem}"
    10  Use "cdns,pc302-gem" for Picochip picoXcell pc302 and later devices based on
    11  the Cadence GEM, or the generic form: "cdns,gem".
    12  Use "atmel,sama5d2-gem" for the GEM IP (10/100) available on Atmel sama5d2 SoCs.
    14  Use "atmel,sama5d3-gem" for the Gigabit IP available on Atmel sama5d3 SoCs.
    15  Use "atmel,sama5d4-gem" for the GEM IP (10/100) available on Atmel sama5d4 SoCs.
    16  Use "cdns,zynq-gem" Xilinx Zynq-7xxx SoC.
    17  Use "cdns,zynqmp-gem" for Zynq Ultrascale+ MPSoC.
    25  Optional elements: 'rx_clk' applies to cdns,zynqmp-gem

/Linux-v4.19/drivers/net/ethernet/sun/

sungem.c
    117  static u16 __sungem_phy_read(struct gem *gp, int phy_addr, int reg)  in __sungem_phy_read()
    145  struct gem *gp = netdev_priv(dev);  in _sungem_phy_read()
    149  static inline u16 sungem_phy_read(struct gem *gp, int reg)  in sungem_phy_read()
    154  static void __sungem_phy_write(struct gem *gp, int phy_addr, int reg, u16 val)  in __sungem_phy_write()
    178  struct gem *gp = netdev_priv(dev);  in _sungem_phy_write()
    182  static inline void sungem_phy_write(struct gem *gp, int reg, u16 val)  in sungem_phy_write()
    187  static inline void gem_enable_ints(struct gem *gp)  in gem_enable_ints()
    193  static inline void gem_disable_ints(struct gem *gp)  in gem_disable_ints()
    200  static void gem_get_cell(struct gem *gp)  in gem_get_cell()
    214  static void gem_put_cell(struct gem *gp)  in gem_put_cell()
    [all …]

/Linux-v4.19/drivers/gpu/drm/qxl/

qxl_gem.c
    69  mutex_lock(&qdev->gem.mutex);  in qxl_gem_object_create()
    70  list_add_tail(&qbo->list, &qdev->gem.objects);  in qxl_gem_object_create()
    71  mutex_unlock(&qdev->gem.mutex);  in qxl_gem_object_create()
    117  INIT_LIST_HEAD(&qdev->gem.objects);  in qxl_gem_init()

qxl_object.c
    39  mutex_lock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
    41  mutex_unlock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
    310  if (list_empty(&qdev->gem.objects))  in qxl_bo_force_delete()
    313  list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {  in qxl_bo_force_delete()
    317  mutex_lock(&qdev->gem.mutex);  in qxl_bo_force_delete()
    319  mutex_unlock(&qdev->gem.mutex);  in qxl_bo_force_delete()

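qxl keeps every buffer object on a device-global list protected by a mutex: objects are added on creation, removed when the TTM object is destroyed, and qxl_bo_force_delete() can walk whatever is left at teardown (radeon_prime.c below maintains the same kind of list for imported buffers). A minimal sketch of that bookkeeping, with illustrative names (struct demo_device, struct demo_tracked_bo, demo_track_bo()):

#include <linux/list.h>
#include <linux/mutex.h>

struct demo_device {
	struct mutex gem_mutex;		/* protects gem_objects */
	struct list_head gem_objects;	/* every live BO owned by this device */
};

struct demo_tracked_bo {
	struct list_head list;		/* link in demo_device.gem_objects */
};

static void demo_gem_init(struct demo_device *ddev)
{
	mutex_init(&ddev->gem_mutex);
	INIT_LIST_HEAD(&ddev->gem_objects);
}

static void demo_track_bo(struct demo_device *ddev, struct demo_tracked_bo *bo)
{
	mutex_lock(&ddev->gem_mutex);
	list_add_tail(&bo->list, &ddev->gem_objects);
	mutex_unlock(&ddev->gem_mutex);
}

static void demo_untrack_bo(struct demo_device *ddev, struct demo_tracked_bo *bo)
{
	mutex_lock(&ddev->gem_mutex);
	list_del_init(&bo->list);
	mutex_unlock(&ddev->gem_mutex);
}
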
/Linux-v4.19/drivers/gpu/drm/hisilicon/hibmc/

hibmc_drm_drv.h
    69  struct drm_gem_object gem;  member
    79  static inline struct hibmc_bo *gem_to_hibmc_bo(struct drm_gem_object *gem)  in gem_to_hibmc_bo() argument
    81  return container_of(gem, struct hibmc_bo, gem);  in gem_to_hibmc_bo()

/Linux-v4.19/drivers/gpu/drm/bochs/

bochs.h
    104  struct drm_gem_object gem;  member
    114  static inline struct bochs_bo *gem_to_bochs_bo(struct drm_gem_object *gem)  in gem_to_bochs_bo() argument
    116  return container_of(gem, struct bochs_bo, gem);  in gem_to_bochs_bo()

/Linux-v4.19/drivers/gpu/drm/

drm_fb_cma_helper.c
    64  struct drm_gem_object *gem;  in drm_fb_cma_get_gem_obj() local
    66  gem = drm_gem_fb_get_obj(fb, plane);  in drm_fb_cma_get_gem_obj()
    67  if (!gem)  in drm_fb_cma_get_gem_obj()
    70  return to_drm_gem_cma_obj(gem);  in drm_fb_cma_get_gem_obj()

/Linux-v4.19/drivers/gpu/drm/radeon/

radeon_prime.c
    76  mutex_lock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()
    77  list_add_tail(&bo->list, &rdev->gem.objects);  in radeon_gem_prime_import_sg_table()
    78  mutex_unlock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()

/Linux-v4.19/drivers/gpu/drm/shmobile/

shmob_drm_plane.c
    47  struct drm_gem_cma_object *gem;  in shmob_drm_plane_compute_base() local
    51  gem = drm_fb_cma_get_gem_obj(fb, 0);  in shmob_drm_plane_compute_base()
    52  splane->dma[0] = gem->paddr + fb->offsets[0]  in shmob_drm_plane_compute_base()
    57  gem = drm_fb_cma_get_gem_obj(fb, 1);  in shmob_drm_plane_compute_base()
    58  splane->dma[1] = gem->paddr + fb->offsets[1]  in shmob_drm_plane_compute_base()

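shmob_drm_plane_compute_base() (and vc4_txp.c / meson_plane.c below) derives a plane's scanout DMA address from the CMA object's base address plus the framebuffer layout: paddr + offsets[plane] + y * pitches[plane] + x * cpp[plane]. A sketch of that arithmetic, assuming x and y are already expressed in the plane's own sample grid (i.e. any chroma subsampling has been applied by the caller); demo_plane_dma_addr() is an illustrative name:

#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_cma_helper.h>

/* DMA address of sample (x, y) in plane @plane of a CMA-backed framebuffer. */
static dma_addr_t demo_plane_dma_addr(struct drm_framebuffer *fb,
				      unsigned int plane,
				      unsigned int x, unsigned int y)
{
	struct drm_gem_cma_object *gem = drm_fb_cma_get_gem_obj(fb, plane);

	return gem->paddr + fb->offsets[plane] +
	       y * fb->pitches[plane] +
	       x * fb->format->cpp[plane];
}
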
/Linux-v4.19/drivers/gpu/drm/vc4/

vc4_txp.c
    252  struct drm_gem_cma_object *gem;  in vc4_txp_connector_atomic_check() local
    278  gem = drm_fb_cma_get_gem_obj(fb, 0);  in vc4_txp_connector_atomic_check()
    293  struct drm_gem_cma_object *gem;  in vc4_txp_connector_atomic_commit() local
    321  gem = drm_fb_cma_get_gem_obj(fb, 0);  in vc4_txp_connector_atomic_commit()
    322  TXP_WRITE(TXP_DST_PTR, gem->paddr + fb->offsets[0]);  in vc4_txp_connector_atomic_commit()

/Linux-v4.19/drivers/gpu/drm/meson/

meson_plane.c
    79  struct drm_gem_cma_object *gem;  in meson_plane_atomic_update() local
    161  gem = drm_fb_cma_get_gem_obj(fb, 0);  in meson_plane_atomic_update()
    163  priv->viu.osd1_addr = gem->paddr;  in meson_plane_atomic_update()