Lines matching full:fifo
In nv50_fifo_runlist_update_locked():
  30  nv50_fifo_runlist_update_locked(struct nv50_fifo *fifo)  [argument]
  32  struct nvkm_device *device = fifo->base.engine.subdev.device;
  36  cur = fifo->runlist[fifo->cur_runlist];
  37  fifo->cur_runlist = !fifo->cur_runlist;
  40  for (i = 0, p = 0; i < fifo->base.nr; i++) {
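Lines 36-37 are the core of the double-buffered runlist: each update picks one of the two runlist buffers and flips cur_runlist so the next update fills the other one, while the loop at line 40 walks the channel slots to rebuild the list. Below is a minimal standalone sketch of that select-and-toggle pattern; the toy_fifo type and plain arrays are made up for illustration and stand in for the real nv50_fifo/nvkm_memory objects.

#include <stdio.h>
#include <stdint.h>

struct toy_fifo {
    uint32_t runlist[2][8]; /* two runlist buffers (stand-in for nvkm_memory) */
    int cur_runlist;        /* which of the two buffers the next update fills */
    int nr;                 /* number of channel slots */
    int active[8];          /* nonzero if channel i belongs on the runlist */
};

static void toy_runlist_update_locked(struct toy_fifo *fifo)
{
    uint32_t *cur;
    int i, p;

    cur = fifo->runlist[fifo->cur_runlist]; /* cf. line 36: pick a buffer   */
    fifo->cur_runlist = !fifo->cur_runlist; /* cf. line 37: flip for next time */

    for (i = 0, p = 0; i < fifo->nr; i++) { /* cf. line 40 */
        if (fifo->active[i])
            cur[p++] = i;                   /* append active channel IDs */
    }
    printf("rebuilt runlist with %d entries\n", p);
}

int main(void)
{
    struct toy_fifo fifo = { .nr = 8, .active = { 1, 0, 1, 1 } };
    toy_runlist_update_locked(&fifo); /* fills runlist[0] */
    toy_runlist_update_locked(&fifo); /* fills runlist[1] */
    return 0;
}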
In nv50_fifo_runlist_update():
  52  nv50_fifo_runlist_update(struct nv50_fifo *fifo)  [argument]
  54  mutex_lock(&fifo->base.mutex);
  55  nv50_fifo_runlist_update_locked(fifo);
  56  mutex_unlock(&fifo->base.mutex);
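Lines 54-56 show the usual locked/unlocked split: nv50_fifo_runlist_update() is the public entry point that takes fifo->base.mutex and calls the _locked worker, while paths that already hold (or do not yet need) the lock, such as nv50_fifo_init() at line 92, call nv50_fifo_runlist_update_locked() directly. A small pthread-based sketch of the same pattern, with toy names rather than the nvkm locking API:

#include <pthread.h>
#include <stdio.h>

struct toy_fifo {
    pthread_mutex_t mutex;
    int runlist_len;
};

/* Worker: the caller is responsible for serialising access to *fifo. */
static void toy_runlist_update_locked(struct toy_fifo *fifo)
{
    fifo->runlist_len++; /* stands in for rebuilding the runlist */
}

/* Public entry point: take the lock, run the worker, drop the lock. */
static void toy_runlist_update(struct toy_fifo *fifo)
{
    pthread_mutex_lock(&fifo->mutex);
    toy_runlist_update_locked(fifo);
    pthread_mutex_unlock(&fifo->mutex);
}

int main(void)
{
    struct toy_fifo fifo = { .mutex = PTHREAD_MUTEX_INITIALIZER };
    toy_runlist_update(&fifo);
    printf("runlist_len = %d\n", fifo.runlist_len);
    return 0;
}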
In nv50_fifo_oneinit():
  62  struct nv50_fifo *fifo = nv50_fifo(base);  [local]
  63  struct nvkm_device *device = fifo->base.engine.subdev.device;
  67  false, &fifo->runlist[0]);
  72  false, &fifo->runlist[1]);
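Lines 67 and 72 are the tail ends of the two allocation calls in nv50_fifo_oneinit(): the same arguments are passed twice, differing only in which slot (&fifo->runlist[0] vs. &fifo->runlist[1]) receives the buffer, which is what sets up the double buffer used above. A generic sketch of allocating such a pair with cleanup on failure, using plain calloc() instead of the nvkm memory allocator:

#include <stdlib.h>

struct toy_fifo {
    void *runlist[2];
};

/* Allocate both runlist buffers; undo the first if the second fails. */
static int toy_fifo_oneinit(struct toy_fifo *fifo, size_t size)
{
    for (int i = 0; i < 2; i++) {
        fifo->runlist[i] = calloc(1, size);
        if (!fifo->runlist[i]) {
            while (i--) {
                free(fifo->runlist[i]);
                fifo->runlist[i] = NULL;
            }
            return -1; /* a kernel driver would return -ENOMEM here */
        }
    }
    return 0;
}

int main(void)
{
    struct toy_fifo fifo = { 0 };
    int ret = toy_fifo_oneinit(&fifo, 4096);
    free(fifo.runlist[1]);
    free(fifo.runlist[0]);
    return ret ? 1 : 0;
}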
In nv50_fifo_init():
  78  struct nv50_fifo *fifo = nv50_fifo(base);  [local]
  79  struct nvkm_device *device = fifo->base.engine.subdev.device;
  92  nv50_fifo_runlist_update_locked(fifo);
In nv50_fifo_dtor():
  102  struct nv50_fifo *fifo = nv50_fifo(base);  [local]
  103  nvkm_memory_unref(&fifo->runlist[1]);
  104  nvkm_memory_unref(&fifo->runlist[0]);
  105  return fifo;
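The destructor at lines 103-105 drops the references on the two runlist buffers in the reverse order of their allocation and then returns the fifo pointer itself, presumably so the nvkm core can free the containing allocation. A toy version of that "tear down, then hand the memory back to the caller" idiom:

#include <stdlib.h>

struct toy_fifo {
    void *runlist[2];
};

/* Release fifo-owned buffers; the caller frees the returned object. */
static void *toy_fifo_dtor(struct toy_fifo *fifo)
{
    free(fifo->runlist[1]); /* cf. line 103 */
    free(fifo->runlist[0]); /* cf. line 104 */
    return fifo;            /* cf. line 105 */
}

int main(void)
{
    struct toy_fifo *fifo = calloc(1, sizeof(*fifo));
    if (!fifo)
        return 1;
    fifo->runlist[0] = malloc(4096);
    fifo->runlist[1] = malloc(4096);
    free(toy_fifo_dtor(fifo)); /* caller frees what the dtor returns */
    return 0;
}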
In nv50_fifo_new_():
  112  struct nv50_fifo *fifo;  [local]
  115  if (!(fifo = kzalloc(sizeof(*fifo), GFP_KERNEL)))
  117  *pfifo = &fifo->base;
  119  ret = nvkm_fifo_ctor(func, device, type, inst, 128, &fifo->base);
  123  set_bit(0, fifo->base.mask); /* PIO channel */
  124  set_bit(127, fifo->base.mask); /* inactive channel */
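nv50_fifo_new_() registers 128 channels with nvkm_fifo_ctor() (line 119) and then uses set_bit() to reserve two of them in fifo->base.mask: channel 0 for PIO and channel 127 as the inactive channel (lines 123-124), presumably so the channel-ID allocator never hands those IDs out to normal clients. A userspace sketch of that reservation with a hand-rolled bitmap (the real code uses the kernel's set_bit() on fifo->base.mask):

#include <stdint.h>
#include <stdio.h>

#define TOY_CHANNELS 128

static uint64_t chan_mask[TOY_CHANNELS / 64]; /* 1 bit per channel ID */

static void toy_set_bit(int bit, uint64_t *mask)
{
    mask[bit / 64] |= (uint64_t)1 << (bit % 64);
}

static int toy_test_bit(int bit, const uint64_t *mask)
{
    return (mask[bit / 64] >> (bit % 64)) & 1;
}

int main(void)
{
    /* Mirror lines 123-124: mark channel 0 (PIO) and 127 (inactive) as taken. */
    toy_set_bit(0, chan_mask);
    toy_set_bit(127, chan_mask);

    for (int i = 0; i < TOY_CHANNELS; i++) {
        if (!toy_test_bit(i, chan_mask)) {
            printf("first allocatable channel: %d\n", i); /* prints 1 */
            break;
        }
    }
    return 0;
}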