/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
D | vmm.c |
      75  struct nvkm_vmm *vmm;  member
     113  VMM_TRACE(_it->vmm, "%s "f, _buf, ##a); \
     129  if (it->vmm->func->flush) {  in nvkm_vmm_flush()
     131  it->vmm->func->flush(it->vmm, it->flush);  in nvkm_vmm_flush()
     145  struct nvkm_vmm *vmm = it->vmm;  in nvkm_vmm_unref_pdes() local
     159  func->sparse(vmm, pgd->pt[0], pdei, 1);  in nvkm_vmm_unref_pdes()
     162  func->unmap(vmm, pgd->pt[0], pdei, 1);  in nvkm_vmm_unref_pdes()
     170  func->pde(vmm, pgd, pdei);  in nvkm_vmm_unref_pdes()
     177  func->pde(vmm, pgd, pdei);  in nvkm_vmm_unref_pdes()
     190  nvkm_mmu_ptc_put(vmm->mmu, vmm->bootstrapped, &pt);  in nvkm_vmm_unref_pdes()
     [all …]
|
D | vmmnv44.c |
      27  nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv44_vmm_pgt_fill() argument
      39  u32 addr = (list ? *list++ : vmm->null) >> 12;  in nv44_vmm_pgt_fill()
      66  VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]);  in nv44_vmm_pgt_fill()
      67  VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]);  in nv44_vmm_pgt_fill()
      68  VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]);  in nv44_vmm_pgt_fill()
      69  VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000);  in nv44_vmm_pgt_fill()
      73  nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv44_vmm_pgt_pte() argument
      82  nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten);  in nv44_vmm_pgt_pte()
      90  VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27);  in nv44_vmm_pgt_pte()
      91  VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22);  in nv44_vmm_pgt_pte()
     [all …]
|
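The vmmnv44.c entry above reflects that nv44 page-table entries are narrower than a 32-bit word, so nv44_vmm_pgt_pte() buffers four translations in tmp[] and merges them into the table with the shifted/ORed VMM_WO032() writes at lines 90-91. A stand-alone sketch of just that packing arithmetic, using only the two shift pairs visible in the listing (the remaining word layouts, the exact field widths, and the 0x40000000 flag written by nv44_vmm_pgt_fill() are not reproduced here):

    #include <stdint.h>

    /* Illustrative only: merge neighbouring sub-word entries the way the
     * listing shows, where the low bits of entry N+1 spill into the high
     * bits of the word holding entry N. */
    static void nv44_pack_sketch(const uint32_t tmp[4], uint32_t out[2])
    {
        out[0] = tmp[0] >> 0 | tmp[1] << 27;   /* as at line 90 above */
        out[1] = tmp[1] >> 5 | tmp[2] << 22;   /* as at line 91 above */
    }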
D | uvmm.c |
      42  return nvkm_uvmm(object)->vmm;  in nvkm_uvmm_search()
      52  struct nvkm_vmm *vmm = uvmm->vmm;  in nvkm_uvmm_mthd_unmap() local
      62  mutex_lock(&vmm->mutex);  in nvkm_uvmm_mthd_unmap()
      63  vma = nvkm_vmm_node_search(vmm, addr);  in nvkm_uvmm_mthd_unmap()
      65  VMM_DEBUG(vmm, "lookup %016llx: %016llx",  in nvkm_uvmm_mthd_unmap()
      71  VMM_DEBUG(vmm, "denied %016llx: %d %d %d", addr,  in nvkm_uvmm_mthd_unmap()
      77  VMM_DEBUG(vmm, "unmapped");  in nvkm_uvmm_mthd_unmap()
      81  nvkm_vmm_unmap_locked(vmm, vma);  in nvkm_uvmm_mthd_unmap()
      84  mutex_unlock(&vmm->mutex);  in nvkm_uvmm_mthd_unmap()
      96  struct nvkm_vmm *vmm = uvmm->vmm;  in nvkm_uvmm_mthd_map() local
     [all …]
|
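The uvmm.c entry above shows the usual shape of a UVMM method handler: take vmm->mutex, look the VMA up by address, validate it, and only then call the *_locked helper. A condensed sketch of that flow, reconstructed from the lines listed (the real nvkm_uvmm_mthd_unmap() also checks that the VMA starts at the requested address, is not busy, and is actually mapped before tearing it down):

    mutex_lock(&vmm->mutex);
    vma = nvkm_vmm_node_search(vmm, addr);      /* find the node covering addr */
    if (!vma) {
        VMM_DEBUG(vmm, "lookup %016llx: not found", addr);
        ret = -ENOENT;                          /* assumed error code for this sketch */
        goto done;
    }
    nvkm_vmm_unmap_locked(vmm, vma);            /* tear down PTEs under the lock */
    ret = 0;
done:
    mutex_unlock(&vmm->mutex);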
D | vmmgp100.c |
      31  gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in gp100_vmm_pgt_pte() argument
      39  VMM_WO064(pt, vmm, ptei++ * 8, data);  in gp100_vmm_pgt_pte()
      45  gp100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in gp100_vmm_pgt_sgl() argument
      48  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte);  in gp100_vmm_pgt_sgl()
      52  gp100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in gp100_vmm_pgt_dma() argument
      56  VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);  in gp100_vmm_pgt_dma()
      60  VMM_WO064(pt, vmm, ptei++ * 8, data);  in gp100_vmm_pgt_dma()
      67  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte);  in gp100_vmm_pgt_dma()
      71  gp100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in gp100_vmm_pgt_mem() argument
      74  VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte);  in gp100_vmm_pgt_mem()
     [all …]
|
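The gp100 entry above, and the nv50/gf100 entries that follow, all share the same split: a small *_pgt_pte() helper writes one PTE at a time with VMM_WO064(), and the _sgl/_dma/_mem entry points simply hand that helper to the matching VMM_MAP_ITER_SGL/_DMA/_MEM macro, which walks the scatter-gather list, DMA address array, or nvkm memory object and invokes it per page. A hedged sketch of the per-PTE half (the (addr >> 4) encoding and the map->type/map->next fields are assumptions based on these fragments, not the exact bit layout):

    static inline void
    pgt_pte_sketch(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
                   u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
    {
        u64 data = (addr >> 4) | map->type;       /* assumed PTE encoding */

        while (ptes--) {
            VMM_WO064(pt, vmm, ptei++ * 8, data); /* one 8-byte PTE per page */
            data += map->next;                    /* assumed per-page increment */
        }
    }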
D | vmmnv50.c |
      32  nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv50_vmm_pgt_pte() argument
      53  VMM_WO064(pt, vmm, ptei++ * 8, data);  in nv50_vmm_pgt_pte()
      58  nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv50_vmm_pgt_sgl() argument
      61  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);  in nv50_vmm_pgt_sgl()
      65  nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv50_vmm_pgt_dma() argument
      69  VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);  in nv50_vmm_pgt_dma()
      73  VMM_WO064(pt, vmm, ptei++ * 8, data);  in nv50_vmm_pgt_dma()
      80  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);  in nv50_vmm_pgt_dma()
      84  nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv50_vmm_pgt_mem() argument
      87  VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);  in nv50_vmm_pgt_mem()
     [all …]
|
D | vmmgf100.c |
      32  gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in gf100_vmm_pgt_pte() argument
      44  VMM_WO064(pt, vmm, ptei++ * 8, data);  in gf100_vmm_pgt_pte()
      51  VMM_WO064(pt, vmm, ptei++ * 8, data);  in gf100_vmm_pgt_pte()
      58  gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in gf100_vmm_pgt_sgl() argument
      61  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);  in gf100_vmm_pgt_sgl()
      65  gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in gf100_vmm_pgt_dma() argument
      69  VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);  in gf100_vmm_pgt_dma()
      73  VMM_WO064(pt, vmm, ptei++ * 8, data);  in gf100_vmm_pgt_dma()
      80  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);  in gf100_vmm_pgt_dma()
      84  gf100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in gf100_vmm_pgt_mem() argument
     [all …]
|
D | vmmnv04.c |
      28  nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv04_vmm_pgt_pte() argument
      33  VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);  in nv04_vmm_pgt_pte()
      39  nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv04_vmm_pgt_sgl() argument
      42  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);  in nv04_vmm_pgt_sgl()
      46  nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv04_vmm_pgt_dma() argument
      52  VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);  in nv04_vmm_pgt_dma()
      55  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);  in nv04_vmm_pgt_dma()
      60  nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm,  in nv04_vmm_pgt_unmap() argument
      63  VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes);  in nv04_vmm_pgt_unmap()
      80  nv04_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc,  in nv04_vmm_valid() argument
     [all …]
|
D | vmmnv41.c |
      27  nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv41_vmm_pgt_pte() argument
      32  VMM_WO032(pt, vmm, ptei++ * 4, data);  in nv41_vmm_pgt_pte()
      38  nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv41_vmm_pgt_sgl() argument
      41  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);  in nv41_vmm_pgt_sgl()
      45  nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv41_vmm_pgt_dma() argument
      52  VMM_WO032(pt, vmm, ptei++ * 4, data);  in nv41_vmm_pgt_dma()
      56  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);  in nv41_vmm_pgt_dma()
      61  nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm,  in nv41_vmm_pgt_unmap() argument
      64  VMM_FO032(pt, vmm, ptei * 4, 0, ptes);  in nv41_vmm_pgt_unmap()
      81  nv41_vmm_flush(struct nvkm_vmm *vmm, int level)  in nv41_vmm_flush() argument
     [all …]
|
D | vmmgm200.c |
      28  gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm,  in gm200_vmm_pgt_sparse() argument
      32  VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes);  in gm200_vmm_pgt_sparse()
      53  gm200_vmm_pgd_sparse(struct nvkm_vmm *vmm,  in gm200_vmm_pgd_sparse() argument
      57  VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. */, pdes);  in gm200_vmm_pgd_sparse()
      96  gm200_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base)  in gm200_vmm_join_() argument
      98  if (vmm->func->page[1].shift == 16)  in gm200_vmm_join_()
     100  return gf100_vmm_join_(vmm, inst, base);  in gm200_vmm_join_()
     104  gm200_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst)  in gm200_vmm_join() argument
     106  return gm200_vmm_join_(vmm, inst, 0);  in gm200_vmm_join()
|
D | nv44.c |
      35  struct nvkm_memory *pt = mmu->vmm->pd->pt[0]->memory;  in nv44_mmu_init()
      46  nvkm_wr32(device, 0x100818, mmu->vmm->null);  in nv44_mmu_init()
      61  .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV04}, nv44_vmm_new, true },
|
D | gm20b.c |
      34  .vmm = {{ -1, 0, NVIF_CLASS_VMM_GM200}, gm20b_vmm_new },
      44  .vmm = {{ -1, -1, NVIF_CLASS_VMM_GM200}, gm20b_vmm_new_fixed },
|
/Linux-v4.19/drivers/gpu/drm/nouveau/nvif/ |
D | vmm.c |
      28  nvif_vmm_unmap(struct nvif_vmm *vmm, u64 addr)  in nvif_vmm_unmap() argument
      30  return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP,  in nvif_vmm_unmap()
      36  nvif_vmm_map(struct nvif_vmm *vmm, u64 addr, u64 size, void *argv, u32 argc,  in nvif_vmm_map() argument
      57  ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_MAP,  in nvif_vmm_map()
      65  nvif_vmm_put(struct nvif_vmm *vmm, struct nvif_vma *vma)  in nvif_vmm_put() argument
      68  WARN_ON(nvif_object_mthd(&vmm->object, NVIF_VMM_V0_PUT,  in nvif_vmm_put()
      77  nvif_vmm_get(struct nvif_vmm *vmm, enum nvif_vmm_get type, bool sparse,  in nvif_vmm_get() argument
      98  ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_GET,  in nvif_vmm_get()
     108  nvif_vmm_fini(struct nvif_vmm *vmm)  in nvif_vmm_fini() argument
     110  kfree(vmm->page);  in nvif_vmm_fini()
     [all …]
|
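The nvif/vmm.c entry above shows that each nvif_vmm_* wrapper is thin: it fills an NVIF_VMM_V0_* argument structure and forwards it through nvif_object_mthd() on vmm->object. A sketch of the unmap path along those lines (only the wrapper signature and the method constant come from the listing; the args struct name and field layout used here are an assumption):

    int
    vmm_unmap_sketch(struct nvif_vmm *vmm, u64 addr)
    {
        struct nvif_vmm_unmap_v0 args = { .addr = addr };  /* assumed struct/fields */

        /* issue the UNMAP method on the VMM object */
        return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP,
                                &args, sizeof(args));
    }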
/Linux-v4.19/drivers/gpu/drm/nouveau/ |
D | nouveau_vmm.c |
      31  nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);  in nouveau_vma_unmap()
      40  int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);  in nouveau_vma_map()
      48  nouveau_vma_find(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm)  in nouveau_vma_find() argument
      53  if (vma->vmm == vmm)  in nouveau_vma_find()
      67  nvif_vmm_put(&vma->vmm->vmm, &tmp);  in nouveau_vma_del()
      76  nouveau_vma_new(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm,  in nouveau_vma_new() argument
      84  if ((vma = *pvma = nouveau_vma_find(nvbo, vmm))) {  in nouveau_vma_new()
      91  vma->vmm = vmm;  in nouveau_vma_new()
     100  ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0,  in nouveau_vma_new()
     108  ret = nvif_vmm_get(&vmm->vmm, PTES, false, mem->mem.page, 0,  in nouveau_vma_new()
     [all …]
|
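In the nouveau_vmm.c entry above, nouveau_vma_new() first calls nouveau_vma_find() so that a buffer already mapped into the same client VMM reuses the existing nouveau_vma instead of allocating a second address range. A sketch of that lookup as implied by the "if (vma->vmm == vmm)" line (the list head and link field names used here are assumptions):

    struct nouveau_vma *
    vma_find_sketch(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm)
    {
        struct nouveau_vma *vma;

        /* assumed: each BO keeps its per-VMM mappings on a list */
        list_for_each_entry(vma, &nvbo->vma_list, head) {
            if (vma->vmm == vmm)
                return vma;     /* already mapped into this VMM */
        }
        return NULL;
    }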
D | nouveau_mem.c |
      37  struct nvif_vmm *vmm, struct nvif_vma *vma)  in nouveau_mem_map() argument
      47  switch (vmm->object.oclass) {  in nouveau_mem_map()
      76  super = vmm->object.client->super;  in nouveau_mem_map()
      77  vmm->object.client->super = true;  in nouveau_mem_map()
      78  ret = nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc,  in nouveau_mem_map()
      80  vmm->object.client->super = super;  in nouveau_mem_map()
      87  nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]);  in nouveau_mem_fini()
      88  nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]);  in nouveau_mem_fini()
|
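The nouveau_mem.c entry above shows nouveau_mem_map() temporarily elevating the client around the map method: it saves vmm->object.client->super, forces it on for the duration of nvif_vmm_map(), then restores the old value. A condensed sketch of that save/force/restore pattern (building args/argc for the GPU family — the switch on vmm->object.oclass — is omitted, and the trailing nvif_vmm_map() arguments are an assumption here, not shown in the listing):

    super = vmm->object.client->super;          /* remember current privilege */
    vmm->object.client->super = true;           /* map methods need 'super' */
    ret = nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc,
                       &mem->mem, 0 /* assumed: backing nvif_mem + offset */);
    vmm->object.client->super = super;          /* restore */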
D | nouveau_chan.c |
     159  ret = nouveau_vma_new(chan->push.buffer, &cli->vmm,  in nouveau_channel_prep()
     174  args.limit = cli->vmm.vmm.limit - 1;  in nouveau_channel_prep()
     204  args.limit = cli->vmm.vmm.limit - 1;  in nouveau_channel_prep()
     255  args.kepler.vmm = nvif_handle(&cli->vmm.vmm.object);  in nouveau_channel_ind()
     262  args.fermi.vmm = nvif_handle(&cli->vmm.vmm.object);  in nouveau_channel_ind()
     269  args.nv50.vmm = nvif_handle(&cli->vmm.vmm.object);  in nouveau_channel_ind()
     359  args.limit = cli->vmm.vmm.limit - 1;  in nouveau_channel_init()
     376  args.limit = cli->vmm.vmm.limit - 1;  in nouveau_channel_init()
     388  args.limit = cli->vmm.vmm.limit - 1;  in nouveau_channel_init()
|
D | nouveau_gem.c |
      75  if (cli->vmm.vmm.object.oclass < NVIF_CLASS_VMM_NV50)  in nouveau_gem_object_open()
      86  ret = nouveau_vma_new(nvbo, &cli->vmm, &vma);  in nouveau_gem_object_open()
     136  nouveau_cli_work_queue(vma->vmm->cli, fence, &work->work);  in nouveau_gem_object_unmap()
     149  if (cli->vmm.vmm.object.oclass < NVIF_CLASS_VMM_NV50)  in nouveau_gem_object_close()
     156  vma = nouveau_vma_find(nvbo, &cli->vmm);  in nouveau_gem_object_close()
     232  if (cli->vmm.vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {  in nouveau_gem_info()
     233  vma = nouveau_vma_find(nvbo, &cli->vmm);  in nouveau_gem_info()
     341  if (drm->client.vmm.vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {  in validate_fini_no_ticket()
     434  if (cli->vmm.vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {  in validate_init()
     435  struct nouveau_vmm *vmm = &cli->vmm;  in validate_init() local
     [all …]
|
D | nouveau_vmm.h |
       8  struct nouveau_vmm *vmm;  member
      27  struct nvif_vmm vmm;  member
|
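The nouveau_vmm.h entry above explains why so much of the code in this listing reads "cli->vmm.vmm" or "vma->vmm->vmm": a nouveau_vma points back at its owning nouveau_vmm, and nouveau_vmm embeds the lower-level nvif_vmm under the same field name. A trimmed sketch of just those two members (the surrounding fields of both structs are not shown in the listing and are omitted here):

    struct nouveau_vma {
        struct nouveau_vmm *vmm;   /* owning per-client VMM (line 8 above) */
        /* ... */
    };

    struct nouveau_vmm {
        struct nvif_vmm vmm;       /* low-level nvif handle (line 27 above) */
        /* ... */
    };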
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/subdev/bar/ |
D | gf100.c |
      34  return gf100_bar(base)->bar[1].vmm;  in gf100_bar_bar1_vmm()
      63  return gf100_bar(base)->bar[0].vmm;  in gf100_bar_bar2_vmm()
     101  (bar_nr == 3) ? "bar2" : "bar1", &bar_vm->vmm);  in gf100_bar_oneinit_bar()
     105  atomic_inc(&bar_vm->vmm->engref[NVKM_SUBDEV_BAR]);  in gf100_bar_oneinit_bar()
     106  bar_vm->vmm->debug = bar->base.subdev.debug;  in gf100_bar_oneinit_bar()
     112  ret = nvkm_vmm_boot(bar_vm->vmm);  in gf100_bar_oneinit_bar()
     117  return nvkm_vmm_join(bar_vm->vmm, bar_vm->inst);  in gf100_bar_oneinit_bar()
     151  nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst);  in gf100_bar_dtor()
     152  nvkm_vmm_unref(&bar->bar[1].vmm);  in gf100_bar_dtor()
     155  nvkm_vmm_part(bar->bar[0].vmm, bar->bar[0].inst);  in gf100_bar_dtor()
     [all …]
|
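The bar/gf100.c entry above traces the life cycle of a subdev-owned VMM: create it, bump its engine reference count, boot its page-table backing, and join it to the BAR's instance block; the destructor undoes this with nvkm_vmm_part() and nvkm_vmm_unref(). A condensed sketch of that order using only the calls visible above (the real gf100_bar_oneinit_bar() handles both BARs and has fuller error handling than shown):

    /* setup, as in gf100_bar_oneinit_bar() */
    atomic_inc(&bar_vm->vmm->engref[NVKM_SUBDEV_BAR]); /* keep VMM alive for BAR */
    bar_vm->vmm->debug = bar->base.subdev.debug;

    ret = nvkm_vmm_boot(bar_vm->vmm);                  /* bring up page tables */
    if (ret)
        return ret;
    ret = nvkm_vmm_join(bar_vm->vmm, bar_vm->inst);    /* attach to instance block */

    /* teardown, as in gf100_bar_dtor() */
    nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst);
    nvkm_vmm_unref(&bar->bar[1].vmm);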
/Linux-v4.19/arch/xtensa/kernel/ |
D | syscall.c |
      65  struct vm_area_struct *vmm;  in arch_get_unmapped_area() local
      87  for (vmm = find_vma(current->mm, addr); ; vmm = vmm->vm_next) {  in arch_get_unmapped_area()
      91  if (!vmm || addr + len <= vm_start_gap(vmm))  in arch_get_unmapped_area()
      93  addr = vmm->vm_end;  in arch_get_unmapped_area()
|
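The syscall.c entry above is the only non-nouveau hit: here "vmm" is just a local vm_area_struct pointer in xtensa's arch_get_unmapped_area(), which uses the classic pre-maple-tree VMA walk — start at find_vma(addr), advance through vm_next until the gap before the next VMA (vm_start_gap()) is large enough for the request. A sketch of that loop based on the lines listed (the MAP_SHARED colour alignment done by the real function is omitted):

    for (vmm = find_vma(current->mm, addr); ; vmm = vmm->vm_next) {
        /* invariant here: !vmm || addr < vmm->vm_end */
        if (TASK_SIZE - len < addr)
            return -ENOMEM;                        /* ran past end of user space */
        if (!vmm || addr + len <= vm_start_gap(vmm))
            return addr;                           /* gap before this VMA fits */
        addr = vmm->vm_end;                        /* try just after this VMA */
    }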
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/ |
D | nv50.c |
     120  nv50_instobj_kmap(struct nv50_instobj *iobj, struct nvkm_vmm *vmm)  in nv50_instobj_kmap() argument
     137  while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) {  in nv50_instobj_kmap()
     158  nvkm_vmm_put(vmm, &ebar);  in nv50_instobj_kmap()
     162  ret = nvkm_memory_map(memory, 0, vmm, bar, NULL, 0);  in nv50_instobj_kmap()
     167  nvkm_vmm_put(vmm, &bar);  in nv50_instobj_kmap()
     178  nvkm_vmm_put(vmm, &iobj->bar);  in nv50_instobj_kmap()
     183  nv50_instobj_map(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm,  in nv50_instobj_map() argument
     187  return nvkm_memory_map(memory, offset, vmm, vma, argv, argc);  in nv50_instobj_map()
     220  struct nvkm_vmm *vmm;  in nv50_instobj_acquire() local
     237  if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) {  in nv50_instobj_acquire()
     [all …]
|
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/subdev/secboot/ |
D | gm200.c |
      51  ret = nvkm_vmm_get(gsb->vmm, 12, blob->size, &vma);  in gm200_secboot_run_blob()
      57  ret = nvkm_memory_map(blob, 0, gsb->vmm, vma, NULL, 0);  in gm200_secboot_run_blob()
      98  nvkm_vmm_put(gsb->vmm, &vma);  in gm200_secboot_run_blob()
     118  &gsb->vmm);  in gm200_secboot_oneinit()
     122  atomic_inc(&gsb->vmm->engref[NVKM_SUBDEV_PMU]);  in gm200_secboot_oneinit()
     123  gsb->vmm->debug = gsb->base.subdev.debug;  in gm200_secboot_oneinit()
     125  ret = nvkm_vmm_join(gsb->vmm, gsb->inst);  in gm200_secboot_oneinit()
     156  nvkm_vmm_part(gsb->vmm, gsb->inst);  in gm200_secboot_dtor()
     157  nvkm_vmm_unref(&gsb->vmm);  in gm200_secboot_dtor()
|
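The secboot/gm200.c entry above (like the instmem and gpfifo entries) shows the common get/map/put sequence for making an nvkm_memory object visible through a VMM: reserve address space with nvkm_vmm_get() (page shift 12, size of the blob), bind the memory with nvkm_memory_map(), and release the range with nvkm_vmm_put() when done. A stripped-down sketch of that sequence from gm200_secboot_run_blob() (the falcon programming and most error paths are omitted):

    struct nvkm_vma *vma = NULL;
    int ret;

    ret = nvkm_vmm_get(gsb->vmm, 12, blob->size, &vma);     /* reserve VA, 4KiB pages */
    if (ret)
        return ret;

    ret = nvkm_memory_map(blob, 0, gsb->vmm, vma, NULL, 0); /* back the range with the blob */
    if (ret == 0) {
        /* ... point the falcon at vma->addr and run the HS blob ... */
    }

    nvkm_vmm_put(gsb->vmm, &vma);                           /* drop the VA reservation */
    return ret;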
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/engine/fifo/ |
D | chan.c |
     120  if (chan->vmm)  in nvkm_fifo_chan_child_del()
     121  atomic_dec(&chan->vmm->engref[engine->subdev.index]);  in nvkm_fifo_chan_child_del()
     154  if (chan->vmm)  in nvkm_fifo_chan_child_new()
     155  atomic_inc(&chan->vmm->engref[engine->subdev.index]);  in nvkm_fifo_chan_child_new()
     330  if (chan->vmm) {  in nvkm_fifo_chan_dtor()
     331  nvkm_vmm_part(chan->vmm, chan->inst->memory);  in nvkm_fifo_chan_dtor()
     332  nvkm_vmm_unref(&chan->vmm);  in nvkm_fifo_chan_dtor()
     390  struct nvkm_vmm *vmm = nvkm_uvmm_search(client, hvmm);  in nvkm_fifo_chan_ctor() local
     391  if (IS_ERR(vmm))  in nvkm_fifo_chan_ctor()
     392  return PTR_ERR(vmm);  in nvkm_fifo_chan_ctor()
     [all …]
|
D | gpfifogk104.c |
     154  nvkm_vmm_put(chan->base.vmm, &chan->engn[engine->subdev.index].vma);  in gk104_fifo_gpfifo_engine_dtor()
     174  ret = nvkm_vmm_get(chan->base.vmm, 12, chan->engn[engn].inst->size,  in gk104_fifo_gpfifo_engine_ctor()
     179  return nvkm_memory_map(chan->engn[engn].inst, 0, chan->base.vmm,  in gk104_fifo_gpfifo_engine_ctor()
     243  u64 vmm, u64 ioffset, u64 ilength,  in gk104_fifo_gpfifo_new_() argument
     253  if (!vmm || runlist < 0 || runlist >= fifo->runlist_nr)  in gk104_fifo_gpfifo_new_()
     275  0x1000, 0x1000, true, vmm, 0, subdevs,  in gk104_fifo_gpfifo_new_()
     341  args->v0.version, args->v0.vmm, args->v0.ioffset,  in gk104_fifo_gpfifo_new()
     346  args->v0.vmm,  in gk104_fifo_gpfifo_new()
|
D | gpfifogf100.c |
     129  nvkm_vmm_put(chan->base.vmm, &chan->engn[engine->subdev.index].vma);  in gf100_fifo_gpfifo_engine_dtor()
     149  ret = nvkm_vmm_get(chan->base.vmm, 12, chan->engn[engn].inst->size,  in gf100_fifo_gpfifo_engine_ctor()
     154  return nvkm_memory_map(chan->engn[engn].inst, 0, chan->base.vmm,  in gf100_fifo_gpfifo_engine_ctor()
     230  args->v0.version, args->v0.vmm, args->v0.ioffset,  in gf100_fifo_gpfifo_new()
     232  if (!args->v0.vmm)  in gf100_fifo_gpfifo_new()
     245  0x1000, 0x1000, true, args->v0.vmm, 0,  in gf100_fifo_gpfifo_new()
|
D | gpfifogv100.c |
     117  u64 vmm, u64 ioffset, u64 ilength,  in gv100_fifo_gpfifo_new_() argument
     127  if (!vmm || runlist < 0 || runlist >= fifo->runlist_nr)  in gv100_fifo_gpfifo_new_()
     146  0x1000, 0x1000, true, vmm, 0, subdevs,  in gv100_fifo_gpfifo_new_()
     213  args->v0.version, args->v0.vmm, args->v0.ioffset,  in gv100_fifo_gpfifo_new()
     218  args->v0.vmm,  in gv100_fifo_gpfifo_new()
|