/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/

D | base.c
      74  nvkm_instobj_dtor(struct nvkm_instmem *imem, struct nvkm_instobj *iobj)  in nvkm_instobj_dtor() argument
      76  spin_lock(&imem->lock);  in nvkm_instobj_dtor()
      78  spin_unlock(&imem->lock);  in nvkm_instobj_dtor()
      83  struct nvkm_instmem *imem, struct nvkm_instobj *iobj)  in nvkm_instobj_ctor() argument
      87  spin_lock(&imem->lock);  in nvkm_instobj_ctor()
      88  list_add_tail(&iobj->head, &imem->list);  in nvkm_instobj_ctor()
      89  spin_unlock(&imem->lock);  in nvkm_instobj_ctor()
      93  nvkm_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero,  in nvkm_instobj_new() argument
      96  struct nvkm_subdev *subdev = &imem->subdev;  in nvkm_instobj_new()
     101  ret = imem->func->memory_new(imem, size, align, zero, &memory);  in nvkm_instobj_new()
     [all …]
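The base.c hits show the shared instobj bookkeeping: the constructor takes imem->lock and appends the object to imem->list, the destructor takes the same lock (the list removal itself falls outside the matched lines), and nvkm_instobj_new() delegates the actual allocation to the per-generation imem->func->memory_new() hook. Below is a minimal sketch of that spinlock-protected tracking pattern using simplified stand-in types rather than the real nvkm structures; the list_del() in the destructor is an assumption based on the surrounding lock/unlock pair.

    #include <linux/list.h>
    #include <linux/spinlock.h>

    /* Simplified stand-ins for nvkm_instmem/nvkm_instobj; field names follow the hits above. */
    struct demo_instmem {
            spinlock_t lock;
            struct list_head list;
    };

    struct demo_instobj {
            struct list_head head;
    };

    /* Constructor side: register the new object on the instmem's list under the lock. */
    static void demo_instobj_ctor(struct demo_instmem *imem, struct demo_instobj *iobj)
    {
            spin_lock(&imem->lock);
            list_add_tail(&iobj->head, &imem->list);
            spin_unlock(&imem->lock);
    }

    /* Destructor side: unlink under the same lock before the object is torn down. */
    static void demo_instobj_dtor(struct demo_instmem *imem, struct demo_instobj *iobj)
    {
            spin_lock(&imem->lock);
            list_del(&iobj->head);          /* assumed step; the hits only show lock/unlock */
            spin_unlock(&imem->lock);
    }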
D | gk20a.c
      54  struct gk20a_instmem *imem;  member
     145  struct gk20a_instmem *imem = obj->base.imem;  in gk20a_instobj_iommu_recycle_vaddr() local
     151  imem->vaddr_use -= nvkm_memory_size(&obj->base.memory);  in gk20a_instobj_iommu_recycle_vaddr()
     152  nvkm_debug(&imem->base.subdev, "vaddr used: %x/%x\n", imem->vaddr_use,  in gk20a_instobj_iommu_recycle_vaddr()
     153  imem->vaddr_max);  in gk20a_instobj_iommu_recycle_vaddr()
     160  gk20a_instmem_vaddr_gc(struct gk20a_instmem *imem, const u64 size)  in gk20a_instmem_vaddr_gc() argument
     162  while (imem->vaddr_use + size > imem->vaddr_max) {  in gk20a_instmem_vaddr_gc()
     164  if (list_empty(&imem->vaddr_lru))  in gk20a_instmem_vaddr_gc()
     168  list_first_entry(&imem->vaddr_lru,  in gk20a_instmem_vaddr_gc()
     177  struct gk20a_instmem *imem = node->imem;  in gk20a_instobj_acquire_dma() local
     [all …]
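gk20a keeps CPU mappings of instance objects around and garbage-collects them by LRU: gk20a_instmem_vaddr_gc() evicts entries from vaddr_lru until vaddr_use + size fits under vaddr_max, and the recycle path subtracts the object's size from vaddr_use. The sketch below illustrates that accounting loop with simplified types; demo_unmap() is a hypothetical stand-in for the real unmap/recycle step, and the bail-out policy when the LRU is empty is an assumption.

    #include <linux/list.h>
    #include <linux/types.h>

    struct demo_instmem {
            u64 vaddr_use;                  /* bytes of instance memory currently mapped */
            u64 vaddr_max;                  /* budget for CPU mappings */
            struct list_head vaddr_lru;     /* mapped-but-unused objects, oldest first */
    };

    struct demo_obj {
            struct list_head vaddr_node;    /* linked on vaddr_lru while unused */
            u64 size;
    };

    /* Hypothetical helper standing in for the real unmap/recycle step. */
    static void demo_unmap(struct demo_obj *obj)
    {
            /* vunmap()/IOMMU teardown would happen here in the real driver */
    }

    /* Evict LRU mappings until a new mapping of 'size' bytes fits in the budget. */
    static void demo_vaddr_gc(struct demo_instmem *imem, const u64 size)
    {
            while (imem->vaddr_use + size > imem->vaddr_max) {
                    struct demo_obj *obj;

                    if (list_empty(&imem->vaddr_lru))
                            break;  /* nothing left to evict; caller maps anyway */

                    obj = list_first_entry(&imem->vaddr_lru, struct demo_obj, vaddr_node);
                    list_del(&obj->vaddr_node);
                    demo_unmap(obj);
                    imem->vaddr_use -= obj->size;
            }
    }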
D | nv40.c
      43  struct nv40_instmem *imem;  member
      51  iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset);  in nv40_instobj_wr32()
      58  return ioread32_native(iobj->imem->iomem + iobj->node->offset + offset);  in nv40_instobj_rd32()
      77  return iobj->imem->iomem + iobj->node->offset;  in nv40_instobj_acquire()
     102  mutex_lock(&iobj->imem->base.mutex);  in nv40_instobj_dtor()
     103  nvkm_mm_free(&iobj->imem->heap, &iobj->node);  in nv40_instobj_dtor()
     104  mutex_unlock(&iobj->imem->base.mutex);  in nv40_instobj_dtor()
     105  nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);  in nv40_instobj_dtor()
     123  struct nv40_instmem *imem = nv40_instmem(base);  in nv40_instobj_new() local
     131  nvkm_instobj_ctor(&nv40_instobj_func, &imem->base, &iobj->base);  in nv40_instobj_new()
     [all …]
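On nv40 the instance-memory heap is permanently mapped through imem->iomem, so the wr32/rd32 accessors are direct MMIO accesses at iomem + node->offset + offset, acquire() simply returns that pointer, and the destructor hands the node back to the allocator under the subdev mutex. Below is a condensed sketch of the accessor side with simplified types; it uses the generic ioread32()/iowrite32() helpers, whereas the real file uses nouveau's endian-aware *_native wrappers.

    #include <linux/io.h>
    #include <linux/types.h>

    struct demo_node { u32 offset; };       /* allocation inside the mapped heap */

    struct demo_instobj {
            void __iomem *iomem;            /* base of the permanently mapped window */
            struct demo_node *node;
    };

    static void demo_instobj_wr32(struct demo_instobj *iobj, u64 offset, u32 data)
    {
            iowrite32(data, iobj->iomem + iobj->node->offset + offset);
    }

    static u32 demo_instobj_rd32(struct demo_instobj *iobj, u64 offset)
    {
            return ioread32(iobj->iomem + iobj->node->offset + offset);
    }

    /* acquire() on this generation is trivial: hand back the already-mapped pointer. */
    static void __iomem *demo_instobj_acquire(struct demo_instobj *iobj)
    {
            return iobj->iomem + iobj->node->offset;
    }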
D | nv04.c
      41  struct nv04_instmem *imem;  member
      49  struct nvkm_device *device = iobj->imem->base.subdev.device;  in nv04_instobj_wr32()
      57  struct nvkm_device *device = iobj->imem->base.subdev.device;  in nv04_instobj_rd32()
      76  struct nvkm_device *device = iobj->imem->base.subdev.device;  in nv04_instobj_acquire()
     102  mutex_lock(&iobj->imem->base.mutex);  in nv04_instobj_dtor()
     103  nvkm_mm_free(&iobj->imem->heap, &iobj->node);  in nv04_instobj_dtor()
     104  mutex_unlock(&iobj->imem->base.mutex);  in nv04_instobj_dtor()
     105  nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);  in nv04_instobj_dtor()
     123  struct nv04_instmem *imem = nv04_instmem(base);  in nv04_instobj_new() local
     131  nvkm_instobj_ctor(&nv04_instobj_func, &imem->base, &iobj->base);  in nv04_instobj_new()
     [all …]
D | nv50.c
      47  struct nv50_instmem *imem;  member
      59  struct nv50_instmem *imem = iobj->imem;  in nv50_instobj_wr32_slow() local
      60  struct nvkm_device *device = imem->base.subdev.device;  in nv50_instobj_wr32_slow()
      65  spin_lock_irqsave(&imem->base.lock, flags);  in nv50_instobj_wr32_slow()
      66  if (unlikely(imem->addr != base)) {  in nv50_instobj_wr32_slow()
      68  imem->addr = base;  in nv50_instobj_wr32_slow()
      71  spin_unlock_irqrestore(&imem->base.lock, flags);  in nv50_instobj_wr32_slow()
      78  struct nv50_instmem *imem = iobj->imem;  in nv50_instobj_rd32_slow() local
      79  struct nvkm_device *device = imem->base.subdev.device;  in nv50_instobj_rd32_slow()
      85  spin_lock_irqsave(&imem->base.lock, flags);  in nv50_instobj_rd32_slow()
     [all …]
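The nv50 *_slow accessors illustrate the indirect path used when an object has no BAR mapping: the code takes imem->base.lock with interrupts disabled, re-points a small PRAMIN window whenever the cached imem->addr differs from the window-aligned base of the target address, and only then performs the access. The sketch below captures that cached-window idea; the demo_* MMIO helpers, the register names and the alignment mask are illustrative assumptions, not the actual nv50 programming.

    #include <linux/spinlock.h>
    #include <linux/types.h>

    struct demo_instmem {
            spinlock_t lock;
            u64 addr;                       /* base currently programmed into the window */
    };

    /* Hypothetical MMIO helpers standing in for the driver's nvkm_rd32()/nvkm_wr32(). */
    static void demo_mmio_wr32(u32 reg, u32 data) { /* writel() in real hardware access */ }
    static u32  demo_mmio_rd32(u32 reg) { return 0; /* readl() in real hardware access */ }

    #define DEMO_WINDOW_SELECT   0x0        /* illustrative: register selecting the window base */
    #define DEMO_WINDOW_APERTURE 0x0        /* illustrative: aperture the window appears through */

    static u32 demo_rd32_slow(struct demo_instmem *imem, u64 vram_addr)
    {
            u64 base = vram_addr & ~0xffffULL;      /* window-aligned part (mask is illustrative) */
            u32 off  = vram_addr &  0xffffULL;      /* offset within the window */
            unsigned long flags;
            u32 data;

            spin_lock_irqsave(&imem->lock, flags);
            if (unlikely(imem->addr != base)) {
                    demo_mmio_wr32(DEMO_WINDOW_SELECT, (u32)(base >> 16)); /* move the window */
                    imem->addr = base;                                     /* remember where it points */
            }
            data = demo_mmio_rd32(DEMO_WINDOW_APERTURE + off);
            spin_unlock_irqrestore(&imem->lock, flags);
            return data;
    }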
/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/engine/fifo/

D | dmanv40.c
      74  struct nvkm_instmem *imem = device->imem;  in nv40_fifo_dma_engine_fini() local
      88  nvkm_kmap(imem->ramfc);  in nv40_fifo_dma_engine_fini()
      89  nvkm_wo32(imem->ramfc, chan->ramfc + ctx, 0x00000000);  in nv40_fifo_dma_engine_fini()
      90  nvkm_done(imem->ramfc);  in nv40_fifo_dma_engine_fini()
     104  struct nvkm_instmem *imem = device->imem;  in nv40_fifo_dma_engine_init() local
     119  nvkm_kmap(imem->ramfc);  in nv40_fifo_dma_engine_init()
     120  nvkm_wo32(imem->ramfc, chan->ramfc + ctx, inst);  in nv40_fifo_dma_engine_init()
     121  nvkm_done(imem->ramfc);  in nv40_fifo_dma_engine_init()
     155  struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;  in nv40_fifo_dma_object_ctor() local
     171  hash = nvkm_ramht_insert(imem->ramht, object, chan->base.chid, 4,  in nv40_fifo_dma_object_ctor()
     [all …]
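Every FIFO file below touches RAMFC the same way: a group of nvkm_wo32() stores to imem->ramfc, bracketed by nvkm_kmap() and nvkm_done(), at chan->ramfc plus a per-field offset. Those three accessors are the real nvkm ones; the helper below is only a trimmed illustration of clearing one context slot (ramfc_base/ctx are stand-in parameters for values the real code derives from the channel and engine), and it assumes the nouveau nvkm headers are on the include path.

    #include <core/memory.h>        /* nvkm_kmap()/nvkm_wo32()/nvkm_done() */
    #include <subdev/instmem.h>     /* struct nvkm_instmem */

    /* Clear one 32-bit context field in a channel's RAMFC entry. */
    static void demo_ramfc_clear_ctx(struct nvkm_instmem *imem, u32 ramfc_base, u32 ctx)
    {
            nvkm_kmap(imem->ramfc);                         /* acquire a CPU mapping */
            nvkm_wo32(imem->ramfc, ramfc_base + ctx, 0x00000000);
            nvkm_done(imem->ramfc);                         /* flush and release it */
    }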
D | dmanv04.c
      39  struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;  in nv04_fifo_dma_object_dtor() local
      42  nvkm_ramht_remove(imem->ramht, cookie);  in nv04_fifo_dma_object_dtor()
      51  struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;  in nv04_fifo_dma_object_ctor() local
      67  hash = nvkm_ramht_insert(imem->ramht, object, chan->base.chid, 4,  in nv04_fifo_dma_object_ctor()
      79  struct nvkm_memory *fctx = device->imem->ramfc;  in nv04_fifo_dma_fini()
     144  struct nvkm_instmem *imem = fifo->base.engine.subdev.device->imem;  in nv04_fifo_dma_dtor() local
     147  nvkm_kmap(imem->ramfc);  in nv04_fifo_dma_dtor()
     149  nvkm_wo32(imem->ramfc, chan->ramfc + c->ctxp, 0x00000000);  in nv04_fifo_dma_dtor()
     151  nvkm_done(imem->ramfc);  in nv04_fifo_dma_dtor()
     175  struct nvkm_instmem *imem = device->imem;  in nv04_fifo_dma_new() local
     [all …]
D | dmanv10.c
      46  struct nvkm_instmem *imem = device->imem;  in nv10_fifo_dma_new() local
      76  nvkm_kmap(imem->ramfc);  in nv10_fifo_dma_new()
      77  nvkm_wo32(imem->ramfc, chan->ramfc + 0x00, args->v0.offset);  in nv10_fifo_dma_new()
      78  nvkm_wo32(imem->ramfc, chan->ramfc + 0x04, args->v0.offset);  in nv10_fifo_dma_new()
      79  nvkm_wo32(imem->ramfc, chan->ramfc + 0x0c, chan->base.push->addr >> 4);  in nv10_fifo_dma_new()
      80  nvkm_wo32(imem->ramfc, chan->ramfc + 0x14,  in nv10_fifo_dma_new()
      87  nvkm_done(imem->ramfc);  in nv10_fifo_dma_new()
D | dmanv17.c
      46  struct nvkm_instmem *imem = device->imem;  in nv17_fifo_dma_new() local
      77  nvkm_kmap(imem->ramfc);  in nv17_fifo_dma_new()
      78  nvkm_wo32(imem->ramfc, chan->ramfc + 0x00, args->v0.offset);  in nv17_fifo_dma_new()
      79  nvkm_wo32(imem->ramfc, chan->ramfc + 0x04, args->v0.offset);  in nv17_fifo_dma_new()
      80  nvkm_wo32(imem->ramfc, chan->ramfc + 0x0c, chan->base.push->addr >> 4);  in nv17_fifo_dma_new()
      81  nvkm_wo32(imem->ramfc, chan->ramfc + 0x14,  in nv17_fifo_dma_new()
      88  nvkm_done(imem->ramfc);  in nv17_fifo_dma_new()
D | nv17.c
      55  struct nvkm_instmem *imem = device->imem;  in nv17_fifo_init() local
      56  struct nvkm_ramht *ramht = imem->ramht;  in nv17_fifo_init()
      57  struct nvkm_memory *ramro = imem->ramro;  in nv17_fifo_init()
      58  struct nvkm_memory *ramfc = imem->ramfc;  in nv17_fifo_init()
D | nv40.c
      65  struct nvkm_instmem *imem = device->imem;  in nv40_fifo_init() local
      66  struct nvkm_ramht *ramht = imem->ramht;  in nv40_fifo_init()
      67  struct nvkm_memory *ramro = imem->ramro;  in nv40_fifo_init()
      68  struct nvkm_memory *ramfc = imem->ramfc;  in nv40_fifo_init()
D | nv04.c
     335  struct nvkm_instmem *imem = device->imem;  in nv04_fifo_init() local
     336  struct nvkm_ramht *ramht = imem->ramht;  in nv04_fifo_init()
     337  struct nvkm_memory *ramro = imem->ramro;  in nv04_fifo_init()
     338  struct nvkm_memory *ramfc = imem->ramfc;  in nv04_fifo_init()
/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/engine/device/

D | base.c
      85  .imem = { 0x00000001, nv04_instmem_new },
     106  .imem = { 0x00000001, nv04_instmem_new },
     128  .imem = { 0x00000001, nv04_instmem_new },
     148  .imem = { 0x00000001, nv04_instmem_new },
     170  .imem = { 0x00000001, nv04_instmem_new },
     192  .imem = { 0x00000001, nv04_instmem_new },
     214  .imem = { 0x00000001, nv04_instmem_new },
     236  .imem = { 0x00000001, nv04_instmem_new },
     258  .imem = { 0x00000001, nv04_instmem_new },
     280  .imem = { 0x00000001, nv04_instmem_new },
     [all …]
D | user.c
      78  struct nvkm_instmem *imem = device->imem;  in nvkm_udevice_info() local
     161  if (imem && args->v0.ram_size > 0)  in nvkm_udevice_info()
     162  args->v0.ram_user = args->v0.ram_user - imem->reserved;  in nvkm_udevice_info()
/Linux-v6.1/drivers/remoteproc/

D | qcom_pil_info.c
      33  struct resource imem;  in qcom_pil_info_init() local
      45  ret = of_address_to_resource(np, 0, &imem);  in qcom_pil_info_init()
      50  base = ioremap(imem.start, resource_size(&imem));  in qcom_pil_info_init()
      56  memset_io(base, 0, resource_size(&imem));  in qcom_pil_info_init()
      59  _reloc.num_entries = (u32)resource_size(&imem) / PIL_RELOC_ENTRY_SIZE;  in qcom_pil_info_init()
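qcom_pil_info_init() resolves the PIL-relocation IMEM node into a struct resource with of_address_to_resource(), ioremaps it, wipes it with memset_io(), and derives the number of relocation entries from the region size. The helper below is a trimmed sketch of that sequence; the device-tree lookup, locking and error unwinding of the real function are omitted, and DEMO_ENTRY_SIZE is an illustrative constant standing in for PIL_RELOC_ENTRY_SIZE.

    #include <linux/err.h>
    #include <linux/io.h>
    #include <linux/ioport.h>
    #include <linux/of.h>
    #include <linux/of_address.h>

    #define DEMO_ENTRY_SIZE 24      /* illustrative; the driver uses PIL_RELOC_ENTRY_SIZE */

    /* Map and clear an IMEM region described by the given device-tree node. */
    static void __iomem *demo_pil_info_map(struct device_node *np, u32 *num_entries)
    {
            struct resource imem;
            void __iomem *base;
            int ret;

            ret = of_address_to_resource(np, 0, &imem);
            if (ret < 0)
                    return ERR_PTR(ret);

            base = ioremap(imem.start, resource_size(&imem));
            if (!base)
                    return ERR_PTR(-ENOMEM);

            /* Start from a clean region so stale PIL entries are never parsed later. */
            memset_io(base, 0, resource_size(&imem));

            *num_entries = (u32)resource_size(&imem) / DEMO_ENTRY_SIZE;
            return base;
    }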
/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/

D | nv40.c
      33  struct nvkm_instmem *imem = device->imem;  in nv40_mpeg_mthd_dma() local
      37  u32 dma0 = nvkm_instmem_rd32(imem, inst + 0);  in nv40_mpeg_mthd_dma()
      38  u32 dma1 = nvkm_instmem_rd32(imem, inst + 4);  in nv40_mpeg_mthd_dma()
      39  u32 dma2 = nvkm_instmem_rd32(imem, inst + 8);  in nv40_mpeg_mthd_dma()
/Linux-v6.1/drivers/net/wwan/iosm/

D | iosm_ipc_mux.c
      13  channel_id = ipc_imem_channel_alloc(ipc_mux->imem, ipc_mux->instance_id,  in ipc_mux_channel_create()
      25  ipc_mux->channel = ipc_imem_channel_open(ipc_mux->imem, channel_id,  in ipc_mux_channel_create()
     187  ipc_imem_channel_close(ipc_mux->imem, ipc_mux->channel_id);  in ipc_mux_channel_close()
     223  ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);  in ipc_mux_schedule()
     227  ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);  in ipc_mux_schedule()
     236  ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);  in ipc_mux_schedule()
     240  ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);  in ipc_mux_schedule()
     279  struct iosm_imem *imem)  in ipc_mux_init() argument
     295  ipc_mux->pcie = imem->pcie;  in ipc_mux_init()
     296  ipc_mux->imem = imem;  in ipc_mux_init()
     [all …]
D | iosm_ipc_protocol.c
      17  int index = ipc_protocol_msg_prep(ipc_protocol->imem, msg_type,  in ipc_protocol_tq_msg_send()
      25  ipc_protocol_msg_hp_update(ipc_protocol->imem);  in ipc_protocol_tq_msg_send()
      83  index = ipc_task_queue_send_task(ipc_protocol->imem,  in ipc_protocol_msg_send()
     100  ipc_task_queue_send_task(ipc_protocol->imem,  in ipc_protocol_msg_send()
     182  ipc_task_queue_send_task(ipc_protocol->imem,  in ipc_protocol_suspend()
     235  ipc_protocol->imem = ipc_imem;  in ipc_protocol_init()
D | iosm_ipc_pcie.c
      43  ipc_imem_cleanup(ipc_pcie->imem);  in ipc_pcie_cleanup()
      53  kfree(ipc_pcie->imem);  in ipc_pcie_deinit()
     314  ipc_pcie->imem = ipc_imem_init(ipc_pcie, pci->device,  in ipc_pcie_probe()
     316  if (!ipc_pcie->imem) {  in ipc_pcie_probe()
     354  ipc_imem_pm_s2idle_sleep(ipc_pcie->imem, true);  in ipc_pcie_suspend_s2idle()
     365  ipc_imem_pm_s2idle_sleep(ipc_pcie->imem, false);  in ipc_pcie_resume_s2idle()
     380  ipc_imem_pm_suspend(ipc_pcie->imem);  in ipc_pcie_suspend()
     391  ipc_imem_pm_resume(ipc_pcie->imem);  in ipc_pcie_resume()
D | iosm_ipc_task_queue.c
     140  int ipc_task_queue_send_task(struct iosm_imem *imem,  in ipc_task_queue_send_task() argument
     157  ret = ipc_task_queue_add_task(imem, arg, copy, func,  in ipc_task_queue_send_task()
     160  dev_err(imem->ipc_task->dev,  in ipc_task_queue_send_task()
D | iosm_ipc_mux_codec.c
      20  ipc_imem_ul_send(ipc_mux->imem);  in ipc_mux_tq_cmd_send()
      28  int ret = ipc_task_queue_send_task(ipc_mux->imem, ipc_mux_tq_cmd_send,  in ipc_mux_acb_send()
      49  ipc_uevent_send(ipc_mux->imem->dev, UEVENT_MDM_TIMEOUT);  in ipc_mux_acb_send()
     252  adb_timer = &ipc_mux->imem->adb_timer;  in ipc_mux_dl_cmds_decode_process()
     902  ul_data_pend = ipc_imem_ul_write_td(ipc_mux->imem);  in ipc_mux_ul_adb_finish()
     906  ipc_imem_td_update_timer_start(ipc_mux->imem);  in ipc_mux_ul_adb_finish()
    1005  (void)ipc_imem_ul_write_td(ipc_mux->imem);  in ipc_mux_lite_send_qlt()
    1163  (void)ipc_imem_ul_write_td(ipc_mux->imem);  in ipc_mux_ul_adgh_encode()
    1494  ipc_imem_adb_timer_start(ipc_mux->imem);  in ipc_mux_tq_ul_trigger_encode()
    1497  ipc_imem_td_update_timer_start(ipc_mux->imem);  in ipc_mux_tq_ul_trigger_encode()
     [all …]
/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/core/

D | memory.c
     141  struct nvkm_instmem *imem = device->imem;  in nvkm_memory_new() local
     145  if (unlikely(target != NVKM_MEM_TARGET_INST || !imem))  in nvkm_memory_new()
     148  ret = nvkm_instobj_new(imem, size, align, zero, &memory);  in nvkm_memory_new()
/Linux-v6.1/drivers/memory/

D | brcmstb_dpfe.c
     185  void __iomem *imem;  member
     570  u32 __iomem *imem = priv->imem;  in __verify_fw_checksum() local
     589  sum += readl_relaxed(imem + i);  in __verify_fw_checksum()
     625  const u32 *dmem, *imem;  in brcmstb_dpfe_download_firmware() local
     675  imem = fw_blob;  in brcmstb_dpfe_download_firmware()
     682  ret = __write_firmware(priv->imem, imem, imem_size, is_big_endian);  in brcmstb_dpfe_download_firmware()
     883  priv->imem = devm_platform_ioremap_resource_byname(pdev, "dpfe-imem");  in brcmstb_dpfe_probe()
     884  if (IS_ERR(priv->imem)) {  in brcmstb_dpfe_probe()
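In brcmstb_dpfe.c the firmware checksum is verified by summing 32-bit words read back from the controller's IMEM with readl_relaxed(); the mapping itself comes from devm_platform_ioremap_resource_byname(pdev, "dpfe-imem") in probe. The sketch below shows only the read-back-and-sum step; the word count, the source of the expected value and the error code are assumptions, not the driver's exact logic.

    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/types.h>

    /*
     * Sum 'words' 32-bit values from an ioremapped IMEM region and compare the
     * result with an expected checksum.  Returns 0 on match, -EINVAL otherwise.
     */
    static int demo_verify_imem_checksum(const u32 __iomem *imem, unsigned int words,
                                         u32 expected)
    {
            u32 sum = 0;
            unsigned int i;

            for (i = 0; i < words; i++)
                    sum += readl_relaxed(imem + i); /* each step advances by 4 bytes */

            return sum == expected ? 0 : -EINVAL;
    }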
/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/subdev/acr/

D | hsfw.c
      33  kfree(hsfw->imem);  in nvkm_acr_hsfw_del()
     145  hsfw->imem = kmemdup(data + desc->code_off, desc->code_size, GFP_KERNEL);  in nvkm_acr_hsfw_load_bl()
     147  if (!hsfw->imem)  in nvkm_acr_hsfw_load_bl()
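nvkm_acr_hsfw_load_bl() copies the code section out of the firmware image with kmemdup(data + desc->code_off, desc->code_size, GFP_KERNEL) and stores it in hsfw->imem, which nvkm_acr_hsfw_del() later kfree()s. Below is a minimal sketch of that copy-out/free pairing; the demo_* struct layouts are illustrative, only the field names echo the hits above.

    #include <linux/errno.h>
    #include <linux/slab.h>
    #include <linux/types.h>

    /* Illustrative descriptor: offset/size of the code section inside the fw blob. */
    struct demo_fw_desc {
            u32 code_off;
            u32 code_size;
    };

    struct demo_hsfw {
            u32 *imem;              /* copy of the code section, loaded into falcon IMEM later */
            u32 imem_size;
    };

    static int demo_hsfw_load(struct demo_hsfw *hsfw, const void *data,
                              const struct demo_fw_desc *desc)
    {
            hsfw->imem = kmemdup(data + desc->code_off, desc->code_size, GFP_KERNEL);
            if (!hsfw->imem)
                    return -ENOMEM;

            hsfw->imem_size = desc->code_size;
            return 0;
    }

    static void demo_hsfw_del(struct demo_hsfw *hsfw)
    {
            kfree(hsfw->imem);
            hsfw->imem = NULL;
    }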
D | priv.h
      60  u32 *imem;  member
      99  u32 *imem;  member