
Searched refs:runlist (Results 1 – 25 of 42) sorted by relevance


/Linux-v4.19/drivers/gpu/drm/nouveau/nvif/
fifo.c
32 struct nv_device_info_v1_data runlist[64]; in nvif_fifo_runlists() member
37 if (device->runlist) in nvif_fifo_runlists()
45 for (i = 0; i < ARRAY_SIZE(a->v.runlist); i++) in nvif_fifo_runlists()
46 a->v.runlist[i].mthd = NV_DEVICE_FIFO_RUNLIST_ENGINES(i); in nvif_fifo_runlists()
53 device->runlist = kcalloc(device->runlists, sizeof(*device->runlist), in nvif_fifo_runlists()
55 if (!device->runlist) { in nvif_fifo_runlists()
62 device->runlist[i].engines = a->v.runlist[i].data; in nvif_fifo_runlists()
93 if (device->runlist[i].engines & a.v.engine.data) in nvif_fifo_runlist()
device.c
41 kfree(device->runlist); in nvif_device_fini()
42 device->runlist = NULL; in nvif_device_fini()
52 device->runlist = NULL; in nvif_device_init()
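The nvif hits above sketch the client-side flow: nvif_fifo_runlists() kcalloc()s a device->runlist array holding one engine bitmask per runlist, nvif_fifo_runlist() tests those masks against a requested engine, and nvif_device_fini() frees the array. A minimal userspace model of that lookup, with made-up types and values standing in for the real nvif structures:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Stand-in for the per-runlist info kept in device->runlist[]. */
struct runlist_info {
	uint64_t engines;	/* bitmask of engines served by this runlist */
};

/* Collect the runlists whose engine mask contains engine_bit, roughly
 * what nvif_fifo_runlist() does with device->runlist[i].engines. */
uint64_t runlists_for_engine(const struct runlist_info *rl, int nr,
			     uint64_t engine_bit)
{
	uint64_t mask = 0;

	for (int i = 0; i < nr; i++)
		if (rl[i].engines & engine_bit)
			mask |= 1ULL << i;	/* BIT_ULL(i) in kernel terms */
	return mask;
}

int main(void)
{
	struct runlist_info *rl = calloc(3, sizeof(*rl));	/* kcalloc() analogue */

	if (!rl)
		return 1;
	rl[0].engines = 0x3;	/* illustrative masks only */
	rl[1].engines = 0x4;
	rl[2].engines = 0x1;
	printf("runlists serving engine 0x1: %#llx\n",
	       (unsigned long long)runlists_for_engine(rl, 3, 0x1));
	free(rl);	/* mirrors nvif_device_fini() freeing device->runlist */
	return 0;
}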
/Linux-v4.19/fs/ntfs/
runlist.h
57 } runlist; typedef
59 static inline void ntfs_init_runlist(runlist *rl) in ntfs_init_runlist()
95 runlist *const runlist, const s64 new_length);
97 int ntfs_rl_punch_nolock(const ntfs_volume *vol, runlist *const runlist,
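The runlist.h hits show the runlist typedef that the rest of the NTFS hits dereference as ni->runlist.rl and lock as ni->runlist.lock: an array of VCN-to-LCN extents plus a reader/writer lock. A self-contained model of that structure (types and field layout inferred from the hits, with a pthread rwlock standing in for the kernel rw_semaphore):

#include <stdint.h>
#include <stddef.h>
#include <pthread.h>

typedef int64_t VCN;	/* virtual cluster number within the attribute */
typedef int64_t LCN;	/* logical cluster number on the volume */

/* One extent of the mapping: 'length' clusters starting at vcn map to lcn. */
typedef struct {
	VCN vcn;
	LCN lcn;
	int64_t length;
} runlist_element;

/* The wrapper the hits access as ni->runlist: the extent array plus the
 * lock taken with down_read()/down_write() in the kernel. */
typedef struct {
	runlist_element *rl;
	pthread_rwlock_t lock;
} runlist;

void init_runlist(runlist *rl)	/* counterpart of ntfs_init_runlist() */
{
	rl->rl = NULL;
	pthread_rwlock_init(&rl->lock, NULL);
}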
runlist.c
1499 int ntfs_rl_truncate_nolock(const ntfs_volume *vol, runlist *const runlist, in ntfs_rl_truncate_nolock() argument
1506 BUG_ON(!runlist); in ntfs_rl_truncate_nolock()
1508 rl = runlist->rl; in ntfs_rl_truncate_nolock()
1511 runlist->rl = NULL; in ntfs_rl_truncate_nolock()
1527 runlist->rl = rl; in ntfs_rl_truncate_nolock()
1551 old_size = trl - runlist->rl + 1; in ntfs_rl_truncate_nolock()
1569 int new_size = rl - runlist->rl + 1; in ntfs_rl_truncate_nolock()
1570 rl = ntfs_rl_realloc(runlist->rl, old_size, new_size); in ntfs_rl_truncate_nolock()
1578 runlist->rl = rl; in ntfs_rl_truncate_nolock()
1587 if ((rl > runlist->rl) && ((rl - 1)->lcn == LCN_HOLE)) in ntfs_rl_truncate_nolock()
[all …]
attrib.c
187 rl = ntfs_mapping_pairs_decompress(ni->vol, a, ni->runlist.rl); in ntfs_map_runlist_nolock()
191 ni->runlist.rl = rl; in ntfs_map_runlist_nolock()
302 down_write(&ni->runlist.lock); in ntfs_map_runlist()
304 if (likely(ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn) <= in ntfs_map_runlist()
307 up_write(&ni->runlist.lock); in ntfs_map_runlist()
354 if (!ni->runlist.rl) { in ntfs_attr_vcn_to_lcn_nolock()
364 lcn = ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn); in ntfs_attr_vcn_to_lcn_nolock()
376 up_read(&ni->runlist.lock); in ntfs_attr_vcn_to_lcn_nolock()
377 down_write(&ni->runlist.lock); in ntfs_attr_vcn_to_lcn_nolock()
378 if (unlikely(ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn) != in ntfs_attr_vcn_to_lcn_nolock()
[all …]
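The attrib.c hits (and the file.c ones further down) show the locking discipline around ni->runlist: translate a VCN under the read lock, and if the run turns out not to be mapped yet, drop the read lock, take the write lock, and re-check before mapping, because another thread may have mapped it in the unlocked window. A hedged sketch of that upgrade-and-revalidate pattern with generic names, not the real NTFS API:

#include <pthread.h>
#include <stdbool.h>

/* Illustrative stand-in for the state guarded by ni->runlist.lock. */
struct mapping {
	pthread_rwlock_t lock;
	bool mapped;	/* "is this VCN range present in rl?" */
};

void map_range(struct mapping *m)
{
	m->mapped = true;	/* placeholder for ntfs_map_runlist_nolock() */
}

/* Shape of ntfs_attr_vcn_to_lcn_nolock()/ntfs_map_runlist(): the read lock
 * cannot be upgraded in place, so drop it, take the write lock, and re-check
 * before doing the expensive mapping work. */
void lookup_or_map(struct mapping *m)
{
	pthread_rwlock_rdlock(&m->lock);
	if (m->mapped) {
		pthread_rwlock_unlock(&m->lock);
		return;	/* fast path: already mapped */
	}
	pthread_rwlock_unlock(&m->lock);	/* up_read() */
	pthread_rwlock_wrlock(&m->lock);	/* down_write() */
	if (!m->mapped)	/* someone may have mapped it while we were unlocked */
		map_range(m);
	pthread_rwlock_unlock(&m->lock);
}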
aops.c
203 BUG_ON(!ni->runlist.rl && !ni->mft_no && !NInoAttr(ni)); in ntfs_read_block()
265 down_read(&ni->runlist.lock); in ntfs_read_block()
266 rl = ni->runlist.rl; in ntfs_read_block()
299 up_read(&ni->runlist.lock); in ntfs_read_block()
305 up_read(&ni->runlist.lock); in ntfs_read_block()
345 up_read(&ni->runlist.lock); in ntfs_read_block()
721 down_read(&ni->runlist.lock); in ntfs_write_block()
722 rl = ni->runlist.rl; in ntfs_write_block()
777 up_read(&ni->runlist.lock); in ntfs_write_block()
783 up_read(&ni->runlist.lock); in ntfs_write_block()
[all …]
mft.c
551 runlist.lock); in ntfs_sync_mft_mirror()
552 rl = NTFS_I(vol->mftmirr_ino)->runlist.rl; in ntfs_sync_mft_mirror()
588 up_read(&NTFS_I(vol->mftmirr_ino)->runlist.lock); in ntfs_sync_mft_mirror()
743 down_read(&NTFS_I(vol->mft_ino)->runlist.lock); in write_mft_record_nolock()
744 rl = NTFS_I(vol->mft_ino)->runlist.rl; in write_mft_record_nolock()
775 up_read(&NTFS_I(vol->mft_ino)->runlist.lock); in write_mft_record_nolock()
1314 down_write(&mftbmp_ni->runlist.lock); in ntfs_mft_bitmap_extend_allocation_nolock()
1321 up_write(&mftbmp_ni->runlist.lock); in ntfs_mft_bitmap_extend_allocation_nolock()
1342 up_write(&mftbmp_ni->runlist.lock); in ntfs_mft_bitmap_extend_allocation_nolock()
1368 up_write(&mftbmp_ni->runlist.lock); in ntfs_mft_bitmap_extend_allocation_nolock()
[all …]
file.c
844 down_read(&ni->runlist.lock); in ntfs_prepare_pages_for_non_resident_write()
846 rl = ni->runlist.rl; in ntfs_prepare_pages_for_non_resident_write()
873 up_write(&ni->runlist.lock); in ntfs_prepare_pages_for_non_resident_write()
876 up_read(&ni->runlist.lock); in ntfs_prepare_pages_for_non_resident_write()
898 up_read(&ni->runlist.lock); in ntfs_prepare_pages_for_non_resident_write()
899 down_write(&ni->runlist.lock); in ntfs_prepare_pages_for_non_resident_write()
980 up_read(&ni->runlist.lock); in ntfs_prepare_pages_for_non_resident_write()
981 down_write(&ni->runlist.lock); in ntfs_prepare_pages_for_non_resident_write()
989 while (--rl2 >= ni->runlist.rl) { in ntfs_prepare_pages_for_non_resident_write()
1004 rl = ntfs_runlists_merge(ni->runlist.rl, rl2); in ntfs_prepare_pages_for_non_resident_write()
[all …]
inode.h
69 runlist runlist; /* If state has the NI_NonResident bit set, member
101 runlist attr_list_rl; /* Run list for the attribute list value. */
compress.c
616 down_read(&ni->runlist.lock); in ntfs_read_compressed_block()
617 rl = ni->runlist.rl; in ntfs_read_compressed_block()
643 up_read(&ni->runlist.lock); in ntfs_read_compressed_block()
661 up_read(&ni->runlist.lock); in ntfs_read_compressed_block()
940 up_read(&ni->runlist.lock); in ntfs_read_compressed_block()
946 up_read(&ni->runlist.lock); in ntfs_read_compressed_block()
logfile.c
759 down_write(&log_ni->runlist.lock); in ntfs_empty_logfile()
760 rl = log_ni->runlist.rl; in ntfs_empty_logfile()
769 rl = log_ni->runlist.rl; in ntfs_empty_logfile()
835 up_write(&log_ni->runlist.lock); in ntfs_empty_logfile()
858 up_write(&log_ni->runlist.lock); in ntfs_empty_logfile()
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
gpfifogv100.c
122 int runlist = ffs(*runlists) -1, ret, i; in gv100_fifo_gpfifo_new_() local
127 if (!vmm || runlist < 0 || runlist >= fifo->runlist_nr) in gv100_fifo_gpfifo_new_()
129 *runlists = BIT_ULL(runlist); in gv100_fifo_gpfifo_new_()
131 engm = fifo->runlist[runlist].engm; in gv100_fifo_gpfifo_new_()
142 chan->runl = runlist; in gv100_fifo_gpfifo_new_()
214 args->v0.ilength, args->v0.runlist); in gv100_fifo_gpfifo_new()
216 &args->v0.runlist, in gv100_fifo_gpfifo_new()
nv50.c
36 cur = fifo->runlist[fifo->cur_runlist]; in nv50_fifo_runlist_update_locked()
67 false, &fifo->runlist[0]); in nv50_fifo_oneinit()
72 false, &fifo->runlist[1]); in nv50_fifo_oneinit()
103 nvkm_memory_unref(&fifo->runlist[1]); in nv50_fifo_dtor()
104 nvkm_memory_unref(&fifo->runlist[0]); in nv50_fifo_dtor()
gk104.c
154 const struct gk104_fifo_runlist_func *func = fifo->func->runlist; in gk104_fifo_runlist_commit()
164 mem = fifo->runlist[runl].mem[fifo->runlist[runl].next]; in gk104_fifo_runlist_commit()
165 fifo->runlist[runl].next = !fifo->runlist[runl].next; in gk104_fifo_runlist_commit()
168 list_for_each_entry(chan, &fifo->runlist[runl].chan, head) { in gk104_fifo_runlist_commit()
172 list_for_each_entry(cgrp, &fifo->runlist[runl].cgrp, head) { in gk104_fifo_runlist_commit()
221 list_add_tail(&cgrp->head, &fifo->runlist[chan->runl].cgrp); in gk104_fifo_runlist_insert()
224 list_add_tail(&chan->head, &fifo->runlist[chan->runl].chan); in gk104_fifo_runlist_insert()
304 list_for_each_entry(chan, &fifo->runlist[runl].chan, head) { in gk104_fifo_recover_chid()
311 list_for_each_entry(cgrp, &fifo->runlist[runl].cgrp, head) { in gk104_fifo_recover_chid()
333 unsigned long engn, engm = fifo->runlist[runl].engm; in gk104_fifo_recover_chan()
[all …]
gpfifogk104.c
248 int runlist = ffs(*runlists) -1, ret, i; in gk104_fifo_gpfifo_new_() local
253 if (!vmm || runlist < 0 || runlist >= fifo->runlist_nr) in gk104_fifo_gpfifo_new_()
255 *runlists = BIT_ULL(runlist); in gk104_fifo_gpfifo_new_()
257 engm = fifo->runlist[runlist].engm; in gk104_fifo_gpfifo_new_()
271 chan->runl = runlist; in gk104_fifo_gpfifo_new_()
342 args->v0.ilength, args->v0.runlist); in gk104_fifo_gpfifo_new()
344 &args->v0.runlist, in gk104_fifo_gpfifo_new()
gf100.c
60 cur = fifo->runlist.mem[fifo->runlist.active]; in gf100_fifo_runlist_commit()
61 fifo->runlist.active = !fifo->runlist.active; in gf100_fifo_runlist_commit()
84 if (wait_event_timeout(fifo->runlist.wait, in gf100_fifo_runlist_commit()
433 wake_up(&fifo->runlist.wait); in gf100_fifo_intr_runlist()
572 false, &fifo->runlist.mem[0]); in gf100_fifo_oneinit()
577 false, &fifo->runlist.mem[1]); in gf100_fifo_oneinit()
581 init_waitqueue_head(&fifo->runlist.wait); in gf100_fifo_oneinit()
646 nvkm_memory_unref(&fifo->runlist.mem[0]); in gf100_fifo_dtor()
647 nvkm_memory_unref(&fifo->runlist.mem[1]); in gf100_fifo_dtor()
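Both gf100.c and gk104.c above rebuild the runlist into one of two buffers and then flip an index (runlist.active, runlist[runl].next), so the hardware can keep consuming the previously committed list while the new one is written. A simplified sketch of that ping-pong commit; the structure names and fixed sizes here are illustrative, not the real nvkm types:

#include <stdint.h>
#include <stddef.h>

struct runlist_buf {
	uint64_t entries[64];	/* channel ids; nvkm_memory objects in the real code */
	size_t nr;
};

struct runlist_pingpong {
	struct runlist_buf mem[2];
	int next;	/* which buffer the next commit writes into */
};

/* Pattern of gf100_fifo_runlist_commit()/gk104_fifo_runlist_commit():
 * fill the currently unused buffer, flip the index for the commit after
 * this one, and hand the freshly written buffer to the hardware (here:
 * return it to the caller). */
struct runlist_buf *runlist_commit(struct runlist_pingpong *rl,
				   const uint64_t *chans, size_t nr)
{
	struct runlist_buf *cur = &rl->mem[rl->next];

	rl->next = !rl->next;
	cur->nr = 0;
	for (size_t i = 0; i < nr && i < 64; i++)
		cur->entries[cur->nr++] = chans[i];
	return cur;
}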
gk104.h
38 } runlist[16]; member
64 } *runlist; member
nv50.h
9 struct nvkm_memory *runlist[2]; member
gm200.c
37 .runlist = &gm107_fifo_runlist,
gm20b.c
35 .runlist = &gm107_fifo_runlist,
gk20a.c
35 .runlist = &gk110_fifo_runlist,
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/subdev/top/
base.c
35 info->runlist = -1; in nvkm_top_device_new()
134 if (info->engine >= 0 && info->runlist >= 0 && n++ == index) { in nvkm_top_engine()
135 *runl = info->runlist; in nvkm_top_engine()
gk104.c
58 info->runlist = (data & 0x01e00000) >> 21; in gk104_top_oneinit()
101 info->addr, info->fault, info->engine, info->runlist, in gk104_top_oneinit()
priv.h
19 int runlist; member
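The top/ hits show where the runlist id comes from: nvkm_top_device_new() defaults info->runlist to -1 (no runlist), and gk104_top_oneinit() decodes it from bits 21..24 of the per-device topology word (mask 0x01e00000). A small worked example of that decode:

#include <stdint.h>
#include <stdio.h>

/* Decode as in the gk104.c hit above: (data & 0x01e00000) >> 21. */
int top_decode_runlist(uint32_t data)
{
	return (data & 0x01e00000) >> 21;
}

int main(void)
{
	uint32_t data = 0x00600000;	/* example value: bits 21 and 22 set */

	printf("runlist %d\n", top_decode_runlist(data));	/* prints 3 */
	return 0;
}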
/Linux-v4.19/drivers/gpu/drm/nouveau/include/nvif/
cla06f.h
11 __u64 runlist; member
