
Searched refs:sgt (Results 1 – 25 of 153) sorted by relevance


/Linux-v5.4/drivers/media/common/videobuf2/
videobuf2-dma-contig.c
49 static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt) in vb2_dc_get_contiguous_size() argument
52 dma_addr_t expected = sg_dma_address(sgt->sgl); in vb2_dc_get_contiguous_size()
56 for_each_sg(sgt->sgl, s, sgt->nents, i) { in vb2_dc_get_contiguous_size()
96 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_prepare() local
99 if (!sgt || buf->db_attach) in vb2_dc_prepare()
102 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_prepare()
109 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_finish() local
112 if (!sgt || buf->db_attach) in vb2_dc_finish()
115 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir); in vb2_dc_finish()
215 struct sg_table sgt; member
[all …]
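The contiguity walk and the prepare/finish sync calls above follow a common sg_table pattern. A minimal sketch of the contiguity check, with illustrative names rather than the driver's exact code:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: count how many leading bytes of a DMA-mapped
 * sg_table are contiguous in DMA address space. */
static unsigned long sgt_contiguous_size(struct sg_table *sgt)
{
	dma_addr_t expected = sg_dma_address(sgt->sgl);
	unsigned long size = 0;
	struct scatterlist *s;
	int i;

	for_each_sg(sgt->sgl, s, sgt->nents, i) {
		if (sg_dma_address(s) != expected)
			break;		/* first hole ends the contiguous run */
		expected += sg_dma_len(s);
		size += sg_dma_len(s);
	}
	return size;
}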
videobuf2-dma-sg.c
104 struct sg_table *sgt; in vb2_dma_sg_alloc() local
140 sgt = &buf->sg_table; in vb2_dma_sg_alloc()
145 sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_alloc()
147 if (!sgt->nents) in vb2_dma_sg_alloc()
177 struct sg_table *sgt = &buf->sg_table; in vb2_dma_sg_put() local
183 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_put()
199 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_prepare() local
205 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_prepare()
212 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_finish() local
218 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir); in vb2_dma_sg_finish()
[all …]
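A sketch of the allocate-then-map step shown in vb2_dma_sg_alloc(), assuming the caller already holds a page array; DMA_ATTR_SKIP_CPU_SYNC defers cache maintenance to the explicit prepare/finish syncs. The helper name is illustrative, not part of the driver:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: build an sg_table over 'n_pages' pages and map it. */
static int sgt_map_pages(struct device *dev, struct sg_table *sgt,
			 struct page **pages, unsigned int n_pages,
			 size_t size, enum dma_data_direction dir)
{
	int ret;

	ret = sg_alloc_table_from_pages(sgt, pages, n_pages, 0, size,
					GFP_KERNEL);
	if (ret)
		return ret;

	/* nents may shrink if the IOMMU coalesces entries; orig_nents is
	 * what the eventual dma_unmap_sg_attrs()/sg_free_table() need. */
	sgt->nents = dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir,
				      DMA_ATTR_SKIP_CPU_SYNC);
	if (!sgt->nents) {
		sg_free_table(sgt);
		return -EIO;
	}
	return 0;
}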
videobuf2-vmalloc.c
207 struct sg_table sgt; member
217 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_attach() local
227 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_attach()
228 ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL); in vb2_vmalloc_dmabuf_ops_attach()
233 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in vb2_vmalloc_dmabuf_ops_attach()
237 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_attach()
254 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_detach() local
259 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_detach()
263 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_vmalloc_dmabuf_ops_detach()
265 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_detach()
[all …]
/Linux-v5.4/drivers/gpu/drm/tegra/
gem.c
30 static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt) in tegra_bo_pin() argument
34 *sgt = obj->sgt; in tegra_bo_pin()
39 static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt) in tegra_bo_unpin() argument
138 bo->size = iommu_map_sg(tegra->domain, bo->paddr, bo->sgt->sgl, in tegra_bo_iommu_map()
139 bo->sgt->nents, prot); in tegra_bo_iommu_map()
206 dma_unmap_sg(drm->dev, bo->sgt->sgl, bo->sgt->nents, in tegra_bo_free()
209 sg_free_table(bo->sgt); in tegra_bo_free()
210 kfree(bo->sgt); in tegra_bo_free()
226 bo->sgt = drm_prime_pages_to_sg(bo->pages, bo->num_pages); in tegra_bo_get_pages()
227 if (IS_ERR(bo->sgt)) { in tegra_bo_get_pages()
[all …]
/Linux-v5.4/drivers/gpu/drm/armada/
armada_gem.c
66 if (dobj->sgt) in armada_gem_free_object()
68 dobj->sgt, DMA_TO_DEVICE); in armada_gem_free_object()
381 struct sg_table *sgt; in armada_gem_prime_map_dma_buf() local
384 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); in armada_gem_prime_map_dma_buf()
385 if (!sgt) in armada_gem_prime_map_dma_buf()
393 if (sg_alloc_table(sgt, count, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
398 for_each_sg(sgt->sgl, sg, count, i) { in armada_gem_prime_map_dma_buf()
410 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) { in armada_gem_prime_map_dma_buf()
411 num = sgt->nents; in armada_gem_prime_map_dma_buf()
416 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
[all …]
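armada_gem_prime_map_dma_buf() fills a freshly allocated table one page per entry before mapping the whole thing. A generic sketch of that shape, with hypothetical names and the shmem page lookup omitted:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: describe a page array one PAGE_SIZE entry at a
 * time, then hand the whole table to the DMA API. */
static int sgt_from_page_array(struct device *dev, struct sg_table *sgt,
			       struct page **pages, unsigned int count,
			       enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i, ret;

	ret = sg_alloc_table(sgt, count, GFP_KERNEL);
	if (ret)
		return ret;

	for_each_sg(sgt->sgl, sg, count, i)
		sg_set_page(sg, pages[i], PAGE_SIZE, 0);

	if (dma_map_sg(dev, sgt->sgl, sgt->nents, dir) == 0) {
		sg_free_table(sgt);
		return -ENOMEM;	/* nothing was mapped */
	}
	return 0;
}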
/Linux-v5.4/drivers/gpu/drm/udl/
udl_dmabuf.c
16 struct sg_table sgt; member
43 struct sg_table *sgt; in udl_detach_dma_buf() local
51 sgt = &udl_attach->sgt; in udl_detach_dma_buf()
54 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in udl_detach_dma_buf()
57 sg_free_table(sgt); in udl_detach_dma_buf()
70 struct sg_table *sgt = NULL; in udl_map_dma_buf() local
80 return &udl_attach->sgt; in udl_map_dma_buf()
97 sgt = &udl_attach->sgt; in udl_map_dma_buf()
99 ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL); in udl_map_dma_buf()
108 wr = sgt->sgl; in udl_map_dma_buf()
[all …]
/Linux-v5.4/drivers/hwtracing/intel_th/
msu-sink.c
51 static int msu_sink_alloc_window(void *data, struct sg_table **sgt, size_t size) in msu_sink_alloc_window() argument
64 ret = sg_alloc_table(*sgt, nents, GFP_KERNEL); in msu_sink_alloc_window()
68 priv->sgts[priv->nr_sgts++] = *sgt; in msu_sink_alloc_window()
70 for_each_sg((*sgt)->sgl, sg_ptr, nents, i) { in msu_sink_alloc_window()
81 static void msu_sink_free_window(void *data, struct sg_table *sgt) in msu_sink_free_window() argument
87 for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) { in msu_sink_free_window()
92 sg_free_table(sgt); in msu_sink_free_window()
96 static int msu_sink_ready(void *data, struct sg_table *sgt, size_t bytes) in msu_sink_ready() argument
100 intel_th_msc_window_unlock(priv->dev, sgt); in msu_sink_ready()
/Linux-v5.4/drivers/xen/
gntdev-dmabuf.c
51 struct sg_table *sgt; member
70 struct sg_table *sgt; member
203 struct sg_table *sgt; in dmabuf_pages_to_sgt() local
206 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); in dmabuf_pages_to_sgt()
207 if (!sgt) { in dmabuf_pages_to_sgt()
212 ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0, in dmabuf_pages_to_sgt()
218 return sgt; in dmabuf_pages_to_sgt()
221 kfree(sgt); in dmabuf_pages_to_sgt()
246 struct sg_table *sgt = gntdev_dmabuf_attach->sgt; in dmabuf_exp_ops_detach() local
248 if (sgt) { in dmabuf_exp_ops_detach()
[all …]
/Linux-v5.4/drivers/gpu/drm/rockchip/
rockchip_drm_gem.c
38 ret = iommu_map_sg(private->domain, rk_obj->dma_addr, rk_obj->sgt->sgl, in rockchip_gem_iommu_map()
39 rk_obj->sgt->nents, prot); in rockchip_gem_iommu_map()
87 rk_obj->sgt = drm_prime_pages_to_sg(rk_obj->pages, rk_obj->num_pages); in rockchip_gem_get_pages()
88 if (IS_ERR(rk_obj->sgt)) { in rockchip_gem_get_pages()
89 ret = PTR_ERR(rk_obj->sgt); in rockchip_gem_get_pages()
100 for_each_sg(rk_obj->sgt->sgl, s, rk_obj->sgt->nents, i) in rockchip_gem_get_pages()
103 dma_sync_sg_for_device(drm->dev, rk_obj->sgt->sgl, rk_obj->sgt->nents, in rockchip_gem_get_pages()
115 sg_free_table(rk_obj->sgt); in rockchip_gem_put_pages()
116 kfree(rk_obj->sgt); in rockchip_gem_put_pages()
352 dma_unmap_sg(drm->dev, rk_obj->sgt->sgl, in rockchip_gem_free_object()
[all …]
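Both the Tegra and Rockchip paths above push a pinned sg_table into a driver-managed IOMMU domain. A minimal sketch of that call, assuming the caller has already reserved the IOVA range; this is illustrative, not either driver's helper:

#include <linux/iommu.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: map every entry of 'sgt' at 'iova' and tear the
 * range back down if the domain could not take all of it. */
static int sgt_iommu_map(struct iommu_domain *domain, unsigned long iova,
			 struct sg_table *sgt, size_t expected)
{
	size_t mapped;

	mapped = iommu_map_sg(domain, iova, sgt->sgl, sgt->nents,
			      IOMMU_READ | IOMMU_WRITE);
	if (mapped < expected) {
		/* a partial mapping still has to be undone */
		iommu_unmap(domain, iova, mapped);
		return -ENOMEM;
	}
	return 0;
}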
/Linux-v5.4/drivers/gpu/drm/
drm_gem_shmem_helper.c
116 drm_prime_gem_destroy(obj, shmem->sgt); in drm_gem_shmem_free_object()
119 if (shmem->sgt) { in drm_gem_shmem_free_object()
120 dma_unmap_sg(obj->dev->dev, shmem->sgt->sgl, in drm_gem_shmem_free_object()
121 shmem->sgt->nents, DMA_BIDIRECTIONAL); in drm_gem_shmem_free_object()
122 sg_free_table(shmem->sgt); in drm_gem_shmem_free_object()
123 kfree(shmem->sgt); in drm_gem_shmem_free_object()
393 dma_unmap_sg(obj->dev->dev, shmem->sgt->sgl, in drm_gem_shmem_purge_locked()
394 shmem->sgt->nents, DMA_BIDIRECTIONAL); in drm_gem_shmem_purge_locked()
395 sg_free_table(shmem->sgt); in drm_gem_shmem_purge_locked()
396 kfree(shmem->sgt); in drm_gem_shmem_purge_locked()
[all …]
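The free and purge paths above all end the same way; the unmap must use the same entry count that was handed to dma_map_sg(). A short sketch of that teardown (hypothetical helper):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical helper: undo a bidirectional mapping, then release the
 * table and the kmalloc'ed wrapper that carried it. */
static void sgt_release(struct device *dev, struct sg_table *sgt)
{
	/* unmap with the count originally passed to dma_map_sg() */
	dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, DMA_BIDIRECTIONAL);
	sg_free_table(sgt);
	kfree(sgt);
}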
drm_gem_cma_helper.c
185 drm_prime_gem_destroy(gem_obj, cma_obj->sgt); in drm_gem_cma_free_object()
430 struct sg_table *sgt; in drm_gem_cma_prime_get_sg_table() local
433 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in drm_gem_cma_prime_get_sg_table()
434 if (!sgt) in drm_gem_cma_prime_get_sg_table()
437 ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr, in drm_gem_cma_prime_get_sg_table()
442 return sgt; in drm_gem_cma_prime_get_sg_table()
445 kfree(sgt); in drm_gem_cma_prime_get_sg_table()
470 struct sg_table *sgt) in drm_gem_cma_prime_import_sg_table() argument
474 if (sgt->nents != 1) { in drm_gem_cma_prime_import_sg_table()
476 dma_addr_t next_addr = sg_dma_address(sgt->sgl); in drm_gem_cma_prime_import_sg_table()
[all …]
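For coherent (CMA-style) allocations there are no struct pages to walk; dma_get_sgtable() builds the table straight from the coherent handle, as in drm_gem_cma_prime_get_sg_table(). A hedged sketch with an illustrative helper name:

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical helper: wrap a coherent allocation in an sg_table so it
 * can be exported through dma-buf. */
static struct sg_table *sgt_from_coherent(struct device *dev, void *vaddr,
					  dma_addr_t dma_addr, size_t size)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = dma_get_sgtable(dev, sgt, vaddr, dma_addr, size);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}
	return sgt;
}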
drm_prime.c
617 struct sg_table *sgt; in drm_gem_map_dma_buf() local
623 sgt = obj->funcs->get_sg_table(obj); in drm_gem_map_dma_buf()
625 sgt = obj->dev->driver->gem_prime_get_sg_table(obj); in drm_gem_map_dma_buf()
627 if (!dma_map_sg_attrs(attach->dev, sgt->sgl, sgt->nents, dir, in drm_gem_map_dma_buf()
629 sg_free_table(sgt); in drm_gem_map_dma_buf()
630 kfree(sgt); in drm_gem_map_dma_buf()
631 sgt = ERR_PTR(-ENOMEM); in drm_gem_map_dma_buf()
634 return sgt; in drm_gem_map_dma_buf()
647 struct sg_table *sgt, in drm_gem_unmap_dma_buf() argument
650 if (!sgt) in drm_gem_unmap_dma_buf()
[all …]
/Linux-v5.4/drivers/staging/media/tegra-vde/
dmabuf-cache.c
24 struct sg_table *sgt; member
38 dma_buf_unmap_attachment(entry->a, entry->sgt, entry->dma_dir); in tegra_vde_release_entry()
69 struct sg_table *sgt; in tegra_vde_dmabuf_cache_map() local
90 *addrp = sg_dma_address(entry->sgt->sgl); in tegra_vde_dmabuf_cache_map()
102 sgt = dma_buf_map_attachment(attachment, dma_dir); in tegra_vde_dmabuf_cache_map()
103 if (IS_ERR(sgt)) { in tegra_vde_dmabuf_cache_map()
105 err = PTR_ERR(sgt); in tegra_vde_dmabuf_cache_map()
109 if (!vde->domain && sgt->nents > 1) { in tegra_vde_dmabuf_cache_map()
122 err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size); in tegra_vde_dmabuf_cache_map()
128 *addrp = sg_dma_address(sgt->sgl); in tegra_vde_dmabuf_cache_map()
[all …]
/Linux-v5.4/net/ceph/
crypto.c
160 static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg, in setup_sgtable() argument
172 memset(sgt, 0, sizeof(*sgt)); in setup_sgtable()
182 ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS); in setup_sgtable()
188 sgt->sgl = prealloc_sg; in setup_sgtable()
189 sgt->nents = sgt->orig_nents = 1; in setup_sgtable()
192 for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) { in setup_sgtable()
212 static void teardown_sgtable(struct sg_table *sgt) in teardown_sgtable() argument
214 if (sgt->orig_nents > 1) in teardown_sgtable()
215 sg_free_table(sgt); in teardown_sgtable()
222 struct sg_table sgt; in ceph_aes_crypt() local
[all …]
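setup_sgtable() above avoids an allocation when a single scatterlist entry is enough. A simplified sketch of that fast path only; the real function also chunks vmalloc'ed buffers page by page, which is omitted here:

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

/* Hypothetical helper: point an sg_table at a caller-provided single
 * entry for small, physically contiguous buffers. */
static int sgt_setup_one(struct sg_table *sgt, struct scatterlist *prealloc_sg,
			 void *buf, unsigned int len)
{
	if (is_vmalloc_addr(buf))
		return -EINVAL;	/* would need per-page chunking, not shown */

	memset(sgt, 0, sizeof(*sgt));
	sg_init_one(prealloc_sg, buf, len);
	sgt->sgl = prealloc_sg;
	sgt->nents = sgt->orig_nents = 1;
	return 0;
}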
/Linux-v5.4/drivers/gpu/drm/etnaviv/
etnaviv_gem.c
23 struct sg_table *sgt = etnaviv_obj->sgt; in etnaviv_gem_scatter_map() local
30 dma_map_sg(dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL); in etnaviv_gem_scatter_map()
36 struct sg_table *sgt = etnaviv_obj->sgt; in etnaviv_gem_scatterlist_unmap() local
54 dma_unmap_sg(dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL); in etnaviv_gem_scatterlist_unmap()
75 if (etnaviv_obj->sgt) { in put_pages()
77 sg_free_table(etnaviv_obj->sgt); in put_pages()
78 kfree(etnaviv_obj->sgt); in put_pages()
79 etnaviv_obj->sgt = NULL; in put_pages()
101 if (!etnaviv_obj->sgt) { in etnaviv_gem_get_pages()
104 struct sg_table *sgt; in etnaviv_gem_get_pages() local
[all …]
etnaviv_mmu.c
73 struct sg_table *sgt, unsigned len, int prot) in etnaviv_iommu_map() argument
79 if (!context || !sgt) in etnaviv_iommu_map()
82 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in etnaviv_iommu_map()
100 for_each_sg(sgt->sgl, sg, i, j) { in etnaviv_iommu_map()
110 struct sg_table *sgt, unsigned len) in etnaviv_iommu_unmap() argument
116 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in etnaviv_iommu_unmap()
135 etnaviv_obj->sgt, etnaviv_obj->base.size); in etnaviv_iommu_remove_mapping()
234 struct sg_table *sgt = etnaviv_obj->sgt; in etnaviv_iommu_map_gem() local
244 sgt->nents == 1 && !(etnaviv_obj->flags & ETNA_BO_FORCE_MMU)) { in etnaviv_iommu_map_gem()
247 iova = sg_dma_address(sgt->sgl) - memory_base; in etnaviv_iommu_map_gem()
[all …]
/Linux-v5.4/drivers/gpu/drm/mediatek/
mtk_drm_gem.c
192 struct sg_table *sgt; in mtk_gem_prime_get_sg_table() local
195 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in mtk_gem_prime_get_sg_table()
196 if (!sgt) in mtk_gem_prime_get_sg_table()
199 ret = dma_get_sgtable_attrs(priv->dma_dev, sgt, mtk_gem->cookie, in mtk_gem_prime_get_sg_table()
204 kfree(sgt); in mtk_gem_prime_get_sg_table()
208 return sgt; in mtk_gem_prime_get_sg_table()
248 struct sg_table *sgt; in mtk_drm_gem_prime_vmap() local
256 sgt = mtk_gem_prime_get_sg_table(obj); in mtk_drm_gem_prime_vmap()
257 if (IS_ERR(sgt)) in mtk_drm_gem_prime_vmap()
265 for_each_sg_page(sgt->sgl, &iter, sgt->orig_nents, 0) { in mtk_drm_gem_prime_vmap()
[all …]
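mtk_drm_gem_prime_vmap() needs the backing pages rather than DMA addresses, so it walks the table with the per-page iterator. A short sketch of that walk, with illustrative names:

#include <linux/scatterlist.h>

/* Hypothetical helper: gather the pages behind an sg_table into an array
 * sized for at least 'max' entries. */
static unsigned int sgt_collect_pages(struct sg_table *sgt,
				      struct page **pages, unsigned int max)
{
	struct sg_page_iter iter;
	unsigned int n = 0;

	for_each_sg_page(sgt->sgl, &iter, sgt->orig_nents, 0) {
		if (n >= max)
			break;
		pages[n++] = sg_page_iter_page(&iter);
	}
	return n;
}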
/Linux-v5.4/drivers/gpu/drm/lima/
lima_object.c
12 if (bo->sgt) { in lima_bo_destroy()
14 drm_prime_gem_destroy(&bo->gem, bo->sgt); in lima_bo_destroy()
60 u32 flags, struct sg_table *sgt) in lima_bo_create() argument
78 if (sgt) { in lima_bo_create()
79 bo->sgt = sgt; in lima_bo_create()
88 sgt, bo->pages, bo->pages_dma_addr, npages); in lima_bo_create()
/Linux-v5.4/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_buffer.c
245 struct sg_table sgt; member
345 __sg_page_iter_start(&viter->iter.base, vsgt->sgt->sgl, in vmw_piter_start()
346 vsgt->sgt->orig_nents, p_offset); in vmw_piter_start()
365 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents, in vmw_ttm_unmap_from_dma()
367 vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents; in vmw_ttm_unmap_from_dma()
388 ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents, in vmw_ttm_map_for_dma()
393 vmw_tt->sgt.nents = ret; in vmw_ttm_map_for_dma()
430 vsgt->sgt = &vmw_tt->sgt; in vmw_ttm_map_dma()
445 (&vmw_tt->sgt, vsgt->pages, vsgt->num_pages, 0, in vmw_ttm_map_dma()
452 if (vsgt->num_pages > vmw_tt->sgt.nents) { in vmw_ttm_map_dma()
[all …]
/Linux-v5.4/drivers/spi/
internals.h
24 struct sg_table *sgt, void *buf, size_t len,
27 struct sg_table *sgt, enum dma_data_direction dir);
30 struct sg_table *sgt, void *buf, size_t len, in spi_map_buf() argument
37 struct device *dev, struct sg_table *sgt, in spi_unmap_buf() argument
spi-ep93xx.c
277 struct sg_table *sgt; in ep93xx_spi_dma_prepare() local
294 sgt = &espi->rx_sgt; in ep93xx_spi_dma_prepare()
301 sgt = &espi->tx_sgt; in ep93xx_spi_dma_prepare()
322 if (nents != sgt->nents) { in ep93xx_spi_dma_prepare()
323 sg_free_table(sgt); in ep93xx_spi_dma_prepare()
325 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); in ep93xx_spi_dma_prepare()
331 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare()
351 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
355 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, conf.direction, in ep93xx_spi_dma_prepare()
358 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
[all …]
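ep93xx_spi_dma_prepare() reuses its sg_table across transfers and finally maps it for a slave DMA descriptor. A hedged sketch of the map-and-prep tail of that sequence, assuming the table has already been filled (hypothetical helper):

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: map a filled sg_table and turn it into a slave
 * DMA descriptor; unmaps again if the prep step fails. */
static struct dma_async_tx_descriptor *
sgt_prep_slave(struct dma_chan *chan, struct sg_table *sgt,
	       enum dma_transfer_direction dir)
{
	enum dma_data_direction map_dir =
		dir == DMA_MEM_TO_DEV ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
	struct dma_async_tx_descriptor *txd;
	int nents;

	nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->orig_nents,
			   map_dir);
	if (!nents)
		return NULL;

	txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir,
				      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txd)
		dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->orig_nents,
			     map_dir);
	return txd;
}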
/Linux-v5.4/drivers/fpga/
fpga-mgr.c
98 struct sg_table *sgt) in fpga_mgr_write_init_sg() argument
112 sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG); in fpga_mgr_write_init_sg()
127 len = sg_copy_to_buffer(sgt->sgl, sgt->nents, buf, in fpga_mgr_write_init_sg()
176 struct sg_table *sgt) in fpga_mgr_buf_load_sg() argument
180 ret = fpga_mgr_write_init_sg(mgr, info, sgt); in fpga_mgr_buf_load_sg()
187 ret = mgr->mops->write_sg(mgr, sgt); in fpga_mgr_buf_load_sg()
191 sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG); in fpga_mgr_buf_load_sg()
252 struct sg_table sgt; in fpga_mgr_buf_load() local
293 rc = sg_alloc_table_from_pages(&sgt, pages, index, offset_in_page(buf), in fpga_mgr_buf_load()
299 rc = fpga_mgr_buf_load_sg(mgr, info, &sgt); in fpga_mgr_buf_load()
[all …]
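fpga_mgr_write_init_sg() and fpga_mgr_buf_load_sg() stream the bitstream out of the table with the sg mapping iterator. A minimal sketch of that iteration against a hypothetical byte-oriented consumer:

#include <linux/scatterlist.h>

/* Hypothetical helper: feed each mapped chunk of an sg_table to a
 * caller-supplied write callback. */
static int sgt_stream(struct sg_table *sgt,
		      int (*write)(const void *buf, size_t len, void *ctx),
		      void *ctx)
{
	struct sg_mapping_iter miter;
	int ret = 0;

	sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);
	while (sg_miter_next(&miter)) {
		ret = write(miter.addr, miter.length, ctx);
		if (ret)
			break;
	}
	sg_miter_stop(&miter);
	return ret;
}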
/Linux-v5.4/include/linux/
intel_th.h
43 int (*alloc_window)(void *priv, struct sg_table **sgt,
45 void (*free_window)(void *priv, struct sg_table *sgt);
59 int (*ready)(void *priv, struct sg_table *sgt, size_t bytes);
65 void intel_th_msc_window_unlock(struct device *dev, struct sg_table *sgt);
/Linux-v5.4/drivers/staging/kpc2000/kpc_dma/
fileops.c
88 …rv = sg_alloc_table_from_pages(&acd->sgt, acd->user_pages, acd->page_count, iov_base & (PAGE_SIZE-… in kpc_dma_transfer()
95 acd->mapped_entry_count = dma_map_sg(&ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, ldev->dir); in kpc_dma_transfer()
102 for_each_sg(acd->sgt.sgl, sg, acd->mapped_entry_count, i) { in kpc_dma_transfer()
125 for_each_sg(acd->sgt.sgl, sg, acd->mapped_entry_count, i) { in kpc_dma_transfer()
189 dma_unmap_sg(&ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, ldev->dir); in kpc_dma_transfer()
190 sg_free_table(&acd->sgt); in kpc_dma_transfer()
210 BUG_ON(acd->sgt.sgl == NULL); in transfer_complete_cb()
220 dma_unmap_sg(&acd->ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, acd->ldev->dir); in transfer_complete_cb()
226 sg_free_table(&acd->sgt); in transfer_complete_cb()
/Linux-v5.4/drivers/gpu/drm/exynos/
exynos_drm_gem.c
25 struct sg_table sgt; in exynos_drm_alloc_buf() local
73 ret = dma_get_sgtable_attrs(to_dma_dev(dev), &sgt, exynos_gem->cookie, in exynos_drm_alloc_buf()
81 if (drm_prime_sg_to_page_addr_arrays(&sgt, exynos_gem->pages, NULL, in exynos_drm_alloc_buf()
88 sg_free_table(&sgt); in exynos_drm_alloc_buf()
96 sg_free_table(&sgt); in exynos_drm_alloc_buf()
161 drm_prime_gem_destroy(obj, exynos_gem->sgt); in exynos_drm_gem_destroy()
475 struct sg_table *sgt) in exynos_drm_gem_prime_import_sg_table() argument
487 exynos_gem->dma_addr = sg_dma_address(sgt->sgl); in exynos_drm_gem_prime_import_sg_table()
496 ret = drm_prime_sg_to_page_addr_arrays(sgt, exynos_gem->pages, NULL, in exynos_drm_gem_prime_import_sg_table()
501 exynos_gem->sgt = sgt; in exynos_drm_gem_prime_import_sg_table()
[all …]
