/Linux-v4.19/drivers/media/common/videobuf2/
D | videobuf2-dma-contig.c |
     49  static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt)  in vb2_dc_get_contiguous_size() argument
     52  dma_addr_t expected = sg_dma_address(sgt->sgl);  in vb2_dc_get_contiguous_size()
     56  for_each_sg(sgt->sgl, s, sgt->nents, i) {  in vb2_dc_get_contiguous_size()
     96  struct sg_table *sgt = buf->dma_sgt;  in vb2_dc_prepare() local
     99  if (!sgt || buf->db_attach)  in vb2_dc_prepare()
    102  dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents,  in vb2_dc_prepare()
    109  struct sg_table *sgt = buf->dma_sgt;  in vb2_dc_finish() local
    112  if (!sgt || buf->db_attach)  in vb2_dc_finish()
    115  dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir);  in vb2_dc_finish()
    221  struct sg_table sgt;  member
    [all …]
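The vb2_dc_get_contiguous_size() hits above check how much of a DMA-mapped scatterlist is physically contiguous. A minimal sketch of that walk, written independently of the driver (the function name and the assumption that the table is already mapped are mine):

#include <linux/scatterlist.h>

/* Sketch: measure the contiguous run at the start of a DMA-mapped
 * sg_table.  Assumes dma_map_sg() has already been called, so
 * sg_dma_address()/sg_dma_len() are valid. */
static unsigned long contiguous_size(struct sg_table *sgt)
{
        struct scatterlist *s;
        dma_addr_t expected = sg_dma_address(sgt->sgl);
        unsigned long size = 0;
        unsigned int i;

        for_each_sg(sgt->sgl, s, sgt->nents, i) {
                if (sg_dma_address(s) != expected)
                        break;          /* first gap ends the contiguous run */
                expected = sg_dma_address(s) + sg_dma_len(s);
                size += sg_dma_len(s);
        }
        return size;
}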
|
D | videobuf2-dma-sg.c |
    104  struct sg_table *sgt;  in vb2_dma_sg_alloc() local
    140  sgt = &buf->sg_table;  in vb2_dma_sg_alloc()
    145  sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents,  in vb2_dma_sg_alloc()
    147  if (!sgt->nents)  in vb2_dma_sg_alloc()
    177  struct sg_table *sgt = &buf->sg_table;  in vb2_dma_sg_put() local
    183  dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents,  in vb2_dma_sg_put()
    199  struct sg_table *sgt = buf->dma_sgt;  in vb2_dma_sg_prepare() local
    205  dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents,  in vb2_dma_sg_prepare()
    212  struct sg_table *sgt = buf->dma_sgt;  in vb2_dma_sg_finish() local
    218  dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir);  in vb2_dma_sg_finish()
    [all …]
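The alloc/put hits above show the map/unmap half of the sg_table lifecycle. A hedged sketch of that pattern (helper names are illustrative, not the driver's): dma_map_sg_attrs() may merge entries, so the mapped count (nents) can be smaller than the allocated count (orig_nents), and the unmap must use orig_nents.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/* Sketch of the map/unmap lifecycle, assuming the table was filled
 * with sg_alloc_table*() beforehand. */
static int map_table(struct device *dev, struct sg_table *sgt,
                     enum dma_data_direction dir)
{
        sgt->nents = dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir,
                                      DMA_ATTR_SKIP_CPU_SYNC);
        return sgt->nents ? 0 : -EIO;
}

static void unmap_table(struct device *dev, struct sg_table *sgt,
                        enum dma_data_direction dir)
{
        dma_unmap_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir,
                           DMA_ATTR_SKIP_CPU_SYNC);
}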
|
D | videobuf2-vmalloc.c |
    208  struct sg_table sgt;  member
    218  struct sg_table *sgt;  in vb2_vmalloc_dmabuf_ops_attach() local
    228  sgt = &attach->sgt;  in vb2_vmalloc_dmabuf_ops_attach()
    229  ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);  in vb2_vmalloc_dmabuf_ops_attach()
    234  for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in vb2_vmalloc_dmabuf_ops_attach()
    238  sg_free_table(sgt);  in vb2_vmalloc_dmabuf_ops_attach()
    255  struct sg_table *sgt;  in vb2_vmalloc_dmabuf_ops_detach() local
    260  sgt = &attach->sgt;  in vb2_vmalloc_dmabuf_ops_detach()
    264  dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents,  in vb2_vmalloc_dmabuf_ops_detach()
    266  sg_free_table(sgt);  in vb2_vmalloc_dmabuf_ops_detach()
    [all …]
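The attach-side hits above allocate an sg_table with one entry per page of the exported buffer. A short sketch of that build step under my own names ('pages' and 'num_pages' stand in for however the exporter tracks its backing pages):

#include <linux/scatterlist.h>

/* Sketch of the dma-buf attach-time table build: one entry per page. */
static int build_attach_table(struct sg_table *sgt, struct page **pages,
                              unsigned int num_pages)
{
        struct scatterlist *sg;
        unsigned int i;
        int ret;

        ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);
        if (ret)
                return ret;

        for_each_sg(sgt->sgl, sg, sgt->nents, i)
                sg_set_page(sg, pages[i], PAGE_SIZE, 0);

        return 0;
}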
|
/Linux-v4.19/drivers/gpu/drm/tegra/ |
D | gem.c |
     30  static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt)  in tegra_bo_pin() argument
     34  *sgt = obj->sgt;  in tegra_bo_pin()
     39  static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt)  in tegra_bo_unpin() argument
    138  bo->size = iommu_map_sg(tegra->domain, bo->paddr, bo->sgt->sgl,  in tegra_bo_iommu_map()
    139  bo->sgt->nents, prot);  in tegra_bo_iommu_map()
    206  dma_unmap_sg(drm->dev, bo->sgt->sgl, bo->sgt->nents,  in tegra_bo_free()
    209  sg_free_table(bo->sgt);  in tegra_bo_free()
    210  kfree(bo->sgt);  in tegra_bo_free()
    226  bo->sgt = drm_prime_pages_to_sg(bo->pages, bo->num_pages);  in tegra_bo_get_pages()
    227  if (IS_ERR(bo->sgt)) {  in tegra_bo_get_pages()
    [all …]
|
/Linux-v4.19/drivers/gpu/drm/armada/ |
D | armada_gem.c |
     64  if (dobj->sgt)  in armada_gem_free_object()
     66  dobj->sgt, DMA_TO_DEVICE);  in armada_gem_free_object()
    379  struct sg_table *sgt;  in armada_gem_prime_map_dma_buf() local
    382  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);  in armada_gem_prime_map_dma_buf()
    383  if (!sgt)  in armada_gem_prime_map_dma_buf()
    391  if (sg_alloc_table(sgt, count, GFP_KERNEL))  in armada_gem_prime_map_dma_buf()
    396  for_each_sg(sgt->sgl, sg, count, i) {  in armada_gem_prime_map_dma_buf()
    408  if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) {  in armada_gem_prime_map_dma_buf()
    409  num = sgt->nents;  in armada_gem_prime_map_dma_buf()
    414  if (sg_alloc_table(sgt, 1, GFP_KERNEL))  in armada_gem_prime_map_dma_buf()
    [all …]
|
/Linux-v4.19/drivers/gpu/drm/udl/ |
D | udl_dmabuf.c |
     26  struct sg_table sgt;  member
     53  struct sg_table *sgt;  in udl_detach_dma_buf() local
     61  sgt = &udl_attach->sgt;  in udl_detach_dma_buf()
     64  dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents,  in udl_detach_dma_buf()
     67  sg_free_table(sgt);  in udl_detach_dma_buf()
     80  struct sg_table *sgt = NULL;  in udl_map_dma_buf() local
     90  return &udl_attach->sgt;  in udl_map_dma_buf()
    107  sgt = &udl_attach->sgt;  in udl_map_dma_buf()
    109  ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL);  in udl_map_dma_buf()
    118  wr = sgt->sgl;  in udl_map_dma_buf()
    [all …]
|
/Linux-v4.19/drivers/xen/ |
D | gntdev-dmabuf.c |
     51  struct sg_table *sgt;  member
     70  struct sg_table *sgt;  member
    197  struct sg_table *sgt;  in dmabuf_pages_to_sgt() local
    200  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);  in dmabuf_pages_to_sgt()
    201  if (!sgt) {  in dmabuf_pages_to_sgt()
    206  ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,  in dmabuf_pages_to_sgt()
    212  return sgt;  in dmabuf_pages_to_sgt()
    215  kfree(sgt);  in dmabuf_pages_to_sgt()
    240  struct sg_table *sgt = gntdev_dmabuf_attach->sgt;  in dmabuf_exp_ops_detach() local
    242  if (sgt) {  in dmabuf_exp_ops_detach()
    [all …]
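dmabuf_pages_to_sgt() above wraps an array of pages in a freshly allocated sg_table via sg_alloc_table_from_pages(), which lets the core merge physically adjacent pages into fewer entries. A minimal sketch of the same idea, with my own helper name and the simplifying assumption that the buffer starts at offset 0 and covers whole pages:

#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

/* Sketch: turn a page array into a heap-allocated sg_table. */
static struct sg_table *pages_to_sgt(struct page **pages, unsigned int nr_pages)
{
        struct sg_table *sgt;
        int ret;

        sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
                return ERR_PTR(-ENOMEM);

        ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,
                                        (unsigned long)nr_pages << PAGE_SHIFT,
                                        GFP_KERNEL);
        if (ret) {
                kfree(sgt);
                return ERR_PTR(ret);
        }
        return sgt;
}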
|
/Linux-v4.19/drivers/gpu/drm/rockchip/ |
D | rockchip_drm_gem.c |
     46  ret = iommu_map_sg(private->domain, rk_obj->dma_addr, rk_obj->sgt->sgl,  in rockchip_gem_iommu_map()
     47  rk_obj->sgt->nents, prot);  in rockchip_gem_iommu_map()
     95  rk_obj->sgt = drm_prime_pages_to_sg(rk_obj->pages, rk_obj->num_pages);  in rockchip_gem_get_pages()
     96  if (IS_ERR(rk_obj->sgt)) {  in rockchip_gem_get_pages()
     97  ret = PTR_ERR(rk_obj->sgt);  in rockchip_gem_get_pages()
    108  for_each_sg(rk_obj->sgt->sgl, s, rk_obj->sgt->nents, i)  in rockchip_gem_get_pages()
    111  dma_sync_sg_for_device(drm->dev, rk_obj->sgt->sgl, rk_obj->sgt->nents,  in rockchip_gem_get_pages()
    123  sg_free_table(rk_obj->sgt);  in rockchip_gem_put_pages()
    124  kfree(rk_obj->sgt);  in rockchip_gem_put_pages()
    373  dma_unmap_sg(drm->dev, rk_obj->sgt->sgl,  in rockchip_gem_free_object()
    [all …]
|
/Linux-v4.19/drivers/gpu/drm/ |
D | drm_prime.c |
     90  struct sg_table *sgt;  member
    234  struct sg_table *sgt = prime_attach->sgt;  in drm_gem_map_detach() local
    236  if (sgt) {  in drm_gem_map_detach()
    238  dma_unmap_sg_attrs(attach->dev, sgt->sgl,  in drm_gem_map_detach()
    239  sgt->nents,  in drm_gem_map_detach()
    242  sg_free_table(sgt);  in drm_gem_map_detach()
    245  kfree(sgt);  in drm_gem_map_detach()
    297  struct sg_table *sgt;  in drm_gem_map_dma_buf() local
    304  return prime_attach->sgt;  in drm_gem_map_dma_buf()
    313  sgt = obj->dev->driver->gem_prime_get_sg_table(obj);  in drm_gem_map_dma_buf()
    [all …]
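The drm_gem_map_dma_buf()/drm_gem_map_detach() hits cache the mapped table in attachment-private data so a second map call just returns the first mapping. An illustrative sketch of that caching shape, not the kernel's actual structures (prime_attach_priv and map_once are mine):

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

/* Illustrative per-attachment cache, mirroring the shape of the hits above. */
struct prime_attach_priv {
        struct sg_table *sgt;
        enum dma_data_direction dir;
};

static struct sg_table *map_once(struct dma_buf_attachment *attach,
                                 struct sg_table *sgt,
                                 enum dma_data_direction dir)
{
        struct prime_attach_priv *priv = attach->priv;
        int nents;

        if (priv->sgt)
                return priv->sgt;       /* reuse the mapping made on first call */

        nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir);
        if (!nents)
                return ERR_PTR(-ENOMEM);
        sgt->nents = nents;

        priv->sgt = sgt;
        priv->dir = dir;
        return sgt;
}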
|
D | drm_gem_cma_helper.c |
    192  drm_prime_gem_destroy(gem_obj, cma_obj->sgt);  in drm_gem_cma_free_object()
    434  struct sg_table *sgt;  in drm_gem_cma_prime_get_sg_table() local
    437  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);  in drm_gem_cma_prime_get_sg_table()
    438  if (!sgt)  in drm_gem_cma_prime_get_sg_table()
    441  ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr,  in drm_gem_cma_prime_get_sg_table()
    446  return sgt;  in drm_gem_cma_prime_get_sg_table()
    449  kfree(sgt);  in drm_gem_cma_prime_get_sg_table()
    474  struct sg_table *sgt)  in drm_gem_cma_prime_import_sg_table() argument
    478  if (sgt->nents != 1) {  in drm_gem_cma_prime_import_sg_table()
    480  dma_addr_t next_addr = sg_dma_address(sgt->sgl);  in drm_gem_cma_prime_import_sg_table()
    [all …]
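drm_gem_cma_prime_get_sg_table() above describes an existing coherent (CMA) allocation as an sg_table with dma_get_sgtable(), which typically yields a single entry. A hedged sketch of that export step (cma_to_sgt is my name, not the helper's):

#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/err.h>

/* Sketch: wrap an existing coherent allocation in an sg_table for export. */
static struct sg_table *cma_to_sgt(struct device *dev, void *vaddr,
                                   dma_addr_t dma_addr, size_t size)
{
        struct sg_table *sgt;
        int ret;

        sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
                return ERR_PTR(-ENOMEM);

        ret = dma_get_sgtable(dev, sgt, vaddr, dma_addr, size);
        if (ret) {
                kfree(sgt);
                return ERR_PTR(ret);
        }
        return sgt;
}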
|
/Linux-v4.19/net/ceph/ |
D | crypto.c |
    158  static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg,  in setup_sgtable() argument
    170  memset(sgt, 0, sizeof(*sgt));  in setup_sgtable()
    180  ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);  in setup_sgtable()
    186  sgt->sgl = prealloc_sg;  in setup_sgtable()
    187  sgt->nents = sgt->orig_nents = 1;  in setup_sgtable()
    190  for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {  in setup_sgtable()
    210  static void teardown_sgtable(struct sg_table *sgt)  in teardown_sgtable() argument
    212  if (sgt->orig_nents > 1)  in teardown_sgtable()
    213  sg_free_table(sgt);  in teardown_sgtable()
    220  struct sg_table sgt;  in ceph_aes_crypt() local
    [all …]
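setup_sgtable()/teardown_sgtable() above avoid allocating a table when the buffer fits a single scatterlist entry by reusing a caller-provided entry; teardown only frees what sg_alloc_table() actually created. A simplified sketch of that pattern (the real helper also fills the entries from the source pages, which is omitted here; function names are mine):

#include <linux/scatterlist.h>
#include <linux/string.h>

/* Sketch: small buffers reuse a preallocated one-entry scatterlist,
 * larger buffers get a real table allocation. */
static int setup_sgt(struct sg_table *sgt, struct scatterlist *prealloc_sg,
                     unsigned int chunk_cnt, gfp_t gfp)
{
        memset(sgt, 0, sizeof(*sgt));

        if (chunk_cnt > 1)
                return sg_alloc_table(sgt, chunk_cnt, gfp);

        sg_init_table(prealloc_sg, 1);
        sgt->sgl = prealloc_sg;
        sgt->nents = sgt->orig_nents = 1;
        return 0;
}

static void teardown_sgt(struct sg_table *sgt)
{
        if (sgt->orig_nents > 1)
                sg_free_table(sgt);     /* only free what we allocated */
}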
|
/Linux-v4.19/drivers/gpu/drm/etnaviv/ |
D | etnaviv_gem.c |
     22  struct sg_table *sgt = etnaviv_obj->sgt;  in etnaviv_gem_scatter_map() local
     29  dma_map_sg(dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL);  in etnaviv_gem_scatter_map()
     35  struct sg_table *sgt = etnaviv_obj->sgt;  in etnaviv_gem_scatterlist_unmap() local
     53  dma_unmap_sg(dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL);  in etnaviv_gem_scatterlist_unmap()
     74  if (etnaviv_obj->sgt) {  in put_pages()
     76  sg_free_table(etnaviv_obj->sgt);  in put_pages()
     77  kfree(etnaviv_obj->sgt);  in put_pages()
     78  etnaviv_obj->sgt = NULL;  in put_pages()
    100  if (!etnaviv_obj->sgt) {  in etnaviv_gem_get_pages()
    103  struct sg_table *sgt;  in etnaviv_gem_get_pages() local
    [all …]
|
D | etnaviv_mmu.c |
     69  struct sg_table *sgt, unsigned len, int prot)  in etnaviv_iommu_map() argument
     77  if (!domain || !sgt)  in etnaviv_iommu_map()
     80  for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in etnaviv_iommu_map()
     98  for_each_sg(sgt->sgl, sg, i, j) {  in etnaviv_iommu_map()
    108  struct sg_table *sgt, unsigned len)  in etnaviv_iommu_unmap() argument
    115  for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in etnaviv_iommu_unmap()
    134  etnaviv_obj->sgt, etnaviv_obj->base.size);  in etnaviv_iommu_remove_mapping()
    226  struct sg_table *sgt = etnaviv_obj->sgt;  in etnaviv_iommu_map_gem() local
    236  sgt->nents == 1 && !(etnaviv_obj->flags & ETNA_BO_FORCE_MMU)) {  in etnaviv_iommu_map_gem()
    239  iova = sg_dma_address(sgt->sgl) - memory_base;  in etnaviv_iommu_map_gem()
    [all …]
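etnaviv_iommu_map() above maps the table one segment at a time and, on failure, re-walks only the entries already mapped to undo them (the second for_each_sg limited by i). A hedged sketch of that loop using the generic IOMMU API; the real driver goes through its own GPU-MMU helpers and also accounts for intra-page offsets, which this omits:

#include <linux/iommu.h>
#include <linux/scatterlist.h>

/* Sketch: map each DMA segment at an increasing device address,
 * unwinding everything already mapped if one entry fails. */
static int map_sgt_to_iommu(struct iommu_domain *domain, unsigned long iova,
                            struct sg_table *sgt, int prot)
{
        struct scatterlist *sg;
        unsigned long da = iova;
        unsigned int i;
        int ret;

        for_each_sg(sgt->sgl, sg, sgt->nents, i) {
                ret = iommu_map(domain, da, sg_dma_address(sg),
                                sg_dma_len(sg), prot);
                if (ret)
                        goto unwind;
                da += sg_dma_len(sg);
        }
        return 0;

unwind:
        da = iova;
        {
                struct scatterlist *s;
                unsigned int j;

                /* only the first i entries were mapped before the failure */
                for_each_sg(sgt->sgl, s, i, j) {
                        iommu_unmap(domain, da, sg_dma_len(s));
                        da += sg_dma_len(s);
                }
        }
        return ret;
}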
|
D | etnaviv_gem_prime.c |
     81  drm_prime_gem_destroy(&etnaviv_obj->base, etnaviv_obj->sgt);  in etnaviv_gem_prime_release()
    105  struct dma_buf_attachment *attach, struct sg_table *sgt)  in etnaviv_gem_prime_import_sg_table() argument
    121  etnaviv_obj->sgt = sgt;  in etnaviv_gem_prime_import_sg_table()
    128  ret = drm_prime_sg_to_page_addr_arrays(sgt, etnaviv_obj->pages,  in etnaviv_gem_prime_import_sg_table()
|
/Linux-v4.19/drivers/gpu/drm/v3d/ |
D | v3d_bo.c |
     50  bo->sgt = drm_prime_pages_to_sg(bo->pages, npages);  in v3d_bo_get_pages()
     51  if (IS_ERR(bo->sgt)) {  in v3d_bo_get_pages()
     52  ret = PTR_ERR(bo->sgt);  in v3d_bo_get_pages()
     57  dma_map_sg(dev->dev, bo->sgt->sgl,  in v3d_bo_get_pages()
     58  bo->sgt->nents, DMA_BIDIRECTIONAL);  in v3d_bo_get_pages()
     64  drm_prime_sg_to_page_addr_arrays(bo->sgt, bo->pages,  in v3d_bo_get_pages()
     91  dma_unmap_sg(obj->dev->dev, bo->sgt->sgl,  in v3d_bo_put_pages()
     92  bo->sgt->nents, DMA_BIDIRECTIONAL);  in v3d_bo_put_pages()
     93  sg_free_table(bo->sgt);  in v3d_bo_put_pages()
     94  kfree(bo->sgt);  in v3d_bo_put_pages()
    [all …]
|
/Linux-v4.19/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_ttm_buffer.c |
    245  struct sg_table sgt;  member
    352  __sg_page_iter_start(&viter->iter, vsgt->sgt->sgl,  in vmw_piter_start()
    353  vsgt->sgt->orig_nents, p_offset);  in vmw_piter_start()
    372  dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents,  in vmw_ttm_unmap_from_dma()
    374  vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents;  in vmw_ttm_unmap_from_dma()
    395  ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents,  in vmw_ttm_map_for_dma()
    400  vmw_tt->sgt.nents = ret;  in vmw_ttm_map_for_dma()
    437  vsgt->sgt = &vmw_tt->sgt;  in vmw_ttm_map_dma()
    451  ret = sg_alloc_table_from_pages(&vmw_tt->sgt, vsgt->pages,  in vmw_ttm_map_dma()
    459  if (vsgt->num_pages > vmw_tt->sgt.nents) {  in vmw_ttm_map_dma()
    [all …]
|
/Linux-v4.19/drivers/spi/ |
D | internals.h |
     24  struct sg_table *sgt, void *buf, size_t len,
     27  struct sg_table *sgt, enum dma_data_direction dir);
     30  struct sg_table *sgt, void *buf, size_t len,  in spi_map_buf() argument
     37  struct device *dev, struct sg_table *sgt,  in spi_unmap_buf() argument
|
D | spi-ep93xx.c |
    268  struct sg_table *sgt;  in ep93xx_spi_dma_prepare() local
    285  sgt = &espi->rx_sgt;  in ep93xx_spi_dma_prepare()
    292  sgt = &espi->tx_sgt;  in ep93xx_spi_dma_prepare()
    313  if (nents != sgt->nents) {  in ep93xx_spi_dma_prepare()
    314  sg_free_table(sgt);  in ep93xx_spi_dma_prepare()
    316  ret = sg_alloc_table(sgt, nents, GFP_KERNEL);  in ep93xx_spi_dma_prepare()
    322  for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in ep93xx_spi_dma_prepare()
    342  nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_prepare()
    346  txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK);  in ep93xx_spi_dma_prepare()
    348  dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_prepare()
    [all …]
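ep93xx_spi_dma_prepare() above sizes and fills the table, maps it for the DMA engine's device, hands the mapped entries to dmaengine_prep_slave_sg(), and unmaps again if the prep fails. A sketch of just the map/prep/unmap tail of that sequence, assuming the caller already filled the table (prep_spi_dma and the split between the two direction enums are my own framing):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

/* Sketch: map a filled sg_table for the DMA engine and build a slave
 * transfer descriptor from the mapped entries. */
static struct dma_async_tx_descriptor *
prep_spi_dma(struct dma_chan *chan, struct sg_table *sgt,
             enum dma_data_direction dir,
             enum dma_transfer_direction xfer_dir)
{
        struct dma_async_tx_descriptor *txd;
        int nents;

        nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
        if (!nents)
                return ERR_PTR(-ENOMEM);

        txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, xfer_dir,
                                      DMA_CTRL_ACK);
        if (!txd) {
                dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
                return ERR_PTR(-ENOMEM);
        }
        return txd;
}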
|
/Linux-v4.19/drivers/fpga/ |
D | fpga-mgr.c |
     98  struct sg_table *sgt)  in fpga_mgr_write_init_sg() argument
    112  sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);  in fpga_mgr_write_init_sg()
    127  len = sg_copy_to_buffer(sgt->sgl, sgt->nents, buf,  in fpga_mgr_write_init_sg()
    176  struct sg_table *sgt)  in fpga_mgr_buf_load_sg() argument
    180  ret = fpga_mgr_write_init_sg(mgr, info, sgt);  in fpga_mgr_buf_load_sg()
    187  ret = mgr->mops->write_sg(mgr, sgt);  in fpga_mgr_buf_load_sg()
    191  sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);  in fpga_mgr_buf_load_sg()
    252  struct sg_table sgt;  in fpga_mgr_buf_load() local
    293  rc = sg_alloc_table_from_pages(&sgt, pages, index, offset_in_page(buf),  in fpga_mgr_buf_load()
    299  rc = fpga_mgr_buf_load_sg(mgr, info, &sgt);  in fpga_mgr_buf_load()
    [all …]
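The fpga-mgr hits above stream an image held in an sg_table through a byte-oriented writer using sg_miter, so the whole image never needs to be contiguous in kernel memory. A small sketch of that iteration pattern (the write callback is a stand-in for whatever consumer the caller has):

#include <linux/scatterlist.h>

/* Sketch: walk an sg_table one kernel mapping at a time and feed each
 * chunk to a byte-oriented writer. */
static int write_sgt(struct sg_table *sgt,
                     int (*write)(void *ctx, const void *buf, size_t len),
                     void *ctx)
{
        struct sg_mapping_iter miter;
        int ret = 0;

        sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);
        while (sg_miter_next(&miter)) {
                ret = write(ctx, miter.addr, miter.length);
                if (ret)
                        break;
        }
        sg_miter_stop(&miter);
        return ret;
}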
|
/Linux-v4.19/drivers/gpu/drm/exynos/ |
D | exynos_drm_gem.c |
     29  struct sg_table sgt;  in exynos_drm_alloc_buf() local
     77  ret = dma_get_sgtable_attrs(to_dma_dev(dev), &sgt, exynos_gem->cookie,  in exynos_drm_alloc_buf()
     85  if (drm_prime_sg_to_page_addr_arrays(&sgt, exynos_gem->pages, NULL,  in exynos_drm_alloc_buf()
     92  sg_free_table(&sgt);  in exynos_drm_alloc_buf()
    100  sg_free_table(&sgt);  in exynos_drm_alloc_buf()
    164  drm_prime_gem_destroy(obj, exynos_gem->sgt);  in exynos_drm_gem_destroy()
    476  struct sg_table *sgt)  in exynos_drm_gem_prime_import_sg_table() argument
    488  exynos_gem->dma_addr = sg_dma_address(sgt->sgl);  in exynos_drm_gem_prime_import_sg_table()
    497  ret = drm_prime_sg_to_page_addr_arrays(sgt, exynos_gem->pages, NULL,  in exynos_drm_gem_prime_import_sg_table()
    502  exynos_gem->sgt = sgt;  in exynos_drm_gem_prime_import_sg_table()
    [all …]
|
/Linux-v4.19/drivers/gpu/drm/mediatek/ |
D | mtk_drm_gem.c |
    192  struct sg_table *sgt;  in mtk_gem_prime_get_sg_table() local
    195  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);  in mtk_gem_prime_get_sg_table()
    196  if (!sgt)  in mtk_gem_prime_get_sg_table()
    199  ret = dma_get_sgtable_attrs(priv->dma_dev, sgt, mtk_gem->cookie,  in mtk_gem_prime_get_sg_table()
    204  kfree(sgt);  in mtk_gem_prime_get_sg_table()
    208  return sgt;  in mtk_gem_prime_get_sg_table()
|
/Linux-v4.19/drivers/gpu/drm/omapdrm/ |
D | omap_gem_dmabuf.c |
    161  struct sg_table *sgt;  in omap_gem_prime_import() local
    182  sgt = dma_buf_map_attachment(attach, DMA_TO_DEVICE);  in omap_gem_prime_import()
    183  if (IS_ERR(sgt)) {  in omap_gem_prime_import()
    184  ret = PTR_ERR(sgt);  in omap_gem_prime_import()
    188  obj = omap_gem_new_dmabuf(dev, dma_buf->size, sgt);  in omap_gem_prime_import()
    199  dma_buf_unmap_attachment(attach, sgt, DMA_TO_DEVICE);  in omap_gem_prime_import()
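omap_gem_prime_import() above shows the importer side of dma-buf: attach to the importing device, ask the exporter for a mapped sg_table, and undo both on error or teardown. A minimal sketch of that sequence (import_sgt is my name; error handling is reduced to the essentials):

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>

/* Sketch: attach a dma-buf to a device and obtain its mapped sg_table. */
static struct sg_table *import_sgt(struct device *dev, struct dma_buf *buf,
                                   struct dma_buf_attachment **out_attach)
{
        struct dma_buf_attachment *attach;
        struct sg_table *sgt;

        attach = dma_buf_attach(buf, dev);
        if (IS_ERR(attach))
                return ERR_CAST(attach);

        sgt = dma_buf_map_attachment(attach, DMA_TO_DEVICE);
        if (IS_ERR(sgt)) {
                dma_buf_detach(buf, attach);
                return sgt;
        }

        *out_attach = attach;   /* caller unmaps and detaches on teardown */
        return sgt;
}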
|
/Linux-v4.19/drivers/gpu/drm/rcar-du/ |
D | rcar_du_vsp.c |
    234  struct sg_table *sgt = &rstate->sg_tables[i];  in rcar_du_vsp_plane_prepare_fb() local
    236  ret = dma_get_sgtable(rcdu->dev, sgt, gem->vaddr, gem->paddr,  in rcar_du_vsp_plane_prepare_fb()
    241  ret = vsp1_du_map_sg(vsp->vsp, sgt);  in rcar_du_vsp_plane_prepare_fb()
    243  sg_free_table(sgt);  in rcar_du_vsp_plane_prepare_fb()
    257  struct sg_table *sgt = &rstate->sg_tables[i];  in rcar_du_vsp_plane_prepare_fb() local
    259  vsp1_du_unmap_sg(vsp->vsp, sgt);  in rcar_du_vsp_plane_prepare_fb()
    260  sg_free_table(sgt);  in rcar_du_vsp_plane_prepare_fb()
    277  struct sg_table *sgt = &rstate->sg_tables[i];  in rcar_du_vsp_plane_cleanup_fb() local
    279  vsp1_du_unmap_sg(vsp->vsp, sgt);  in rcar_du_vsp_plane_cleanup_fb()
    280  sg_free_table(sgt);  in rcar_du_vsp_plane_cleanup_fb()
|
/Linux-v4.19/drivers/gpu/host1x/ |
D | job.c |
    120  struct sg_table *sgt;  in pin_job() local
    129  phys_addr = host1x_bo_pin(reloc->target.bo, &sgt);  in pin_job()
    133  job->unpins[job->num_unpins].sgt = sgt;  in pin_job()
    141  struct sg_table *sgt;  in pin_job() local
    153  phys_addr = host1x_bo_pin(g->bo, &sgt);  in pin_job()
    156  for_each_sg(sgt->sgl, sg, sgt->nents, j)  in pin_job()
    170  sgt->sgl, sgt->nents, IOMMU_READ);  in pin_job()
    187  job->unpins[job->num_unpins].sgt = sgt;  in pin_job()
    580  host1x_bo_unpin(unpin->bo, unpin->sgt);  in host1x_job_unpin()
|
/Linux-v4.19/net/smc/ |
D | smc_ib.c |
    361  buf_slot->sgt[SMC_SINGLE_LINK].sgl,  in smc_ib_map_mr_sg()
    362  buf_slot->sgt[SMC_SINGLE_LINK].orig_nents,  in smc_ib_map_mr_sg()
    400  for_each_sg(buf_slot->sgt[SMC_SINGLE_LINK].sgl, sg,  in smc_ib_sync_sg_for_cpu()
    401  buf_slot->sgt[SMC_SINGLE_LINK].nents, i) {  in smc_ib_sync_sg_for_cpu()
    420  for_each_sg(buf_slot->sgt[SMC_SINGLE_LINK].sgl, sg,  in smc_ib_sync_sg_for_device()
    421  buf_slot->sgt[SMC_SINGLE_LINK].nents, i) {  in smc_ib_sync_sg_for_device()
    439  buf_slot->sgt[SMC_SINGLE_LINK].sgl,  in smc_ib_buf_map_sg()
    440  buf_slot->sgt[SMC_SINGLE_LINK].orig_nents,  in smc_ib_buf_map_sg()
    452  if (!buf_slot->sgt[SMC_SINGLE_LINK].sgl->dma_address)  in smc_ib_buf_unmap_sg()
    456  buf_slot->sgt[SMC_SINGLE_LINK].sgl,  in smc_ib_buf_unmap_sg()
    [all …]
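The smc_ib_sync_sg_for_cpu()/smc_ib_sync_sg_for_device() hits above hand buffer ownership back and forth between CPU and device. The SMC code does this per entry through the InfiniBand ib_dma_* wrappers; for a plain DMA-API user the same hand-off can be done on the whole mapped table in one call, as in this hedged sketch:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch: give the CPU, then the device, ownership of a mapped buffer. */
static void buf_to_cpu(struct device *dev, struct sg_table *sgt,
                       enum dma_data_direction dir)
{
        dma_sync_sg_for_cpu(dev, sgt->sgl, sgt->orig_nents, dir);
}

static void buf_to_device(struct device *dev, struct sg_table *sgt,
                          enum dma_data_direction dir)
{
        dma_sync_sg_for_device(dev, sgt->sgl, sgt->orig_nents, dir);
}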
|