Lines Matching full:fifo (uses of the fifo identifier in the Nouveau Turing/tu102 FIFO code)

38 tu102_fifo_runlist_commit(struct gk104_fifo *fifo, int runl,  in tu102_fifo_runlist_commit()  argument
41 struct nvkm_device *device = fifo->base.engine.subdev.device; in tu102_fifo_runlist_commit()
89 tu102_fifo_pbdma_init(struct gk104_fifo *fifo) in tu102_fifo_pbdma_init() argument
91 struct nvkm_device *device = fifo->base.engine.subdev.device; in tu102_fifo_pbdma_init()
92 const u32 mask = (1 << fifo->pbdma_nr) - 1; in tu102_fifo_pbdma_init()
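
The hits at lines 89-92 show tu102_fifo_pbdma_init() building an enable mask that covers every PBDMA unit the device reports. Below is a minimal standalone sketch of just that mask computation; the unit count is a made-up example value, not something read from real hardware.

#include <stdio.h>

int main(void)
{
        int pbdma_nr = 3;                           /* hypothetical number of PBDMA units */
        unsigned int mask = (1u << pbdma_nr) - 1;   /* bits 0..pbdma_nr-1 set -> 0x7 */

        printf("PBDMA enable mask: 0x%08x\n", mask);
        return 0;
}
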
121 struct gk104_fifo *fifo = container_of(w, typeof(*fifo), recover.work); in tu102_fifo_recover_work() local
122 struct nvkm_device *device = fifo->base.engine.subdev.device; in tu102_fifo_recover_work()
128 spin_lock_irqsave(&fifo->base.lock, flags); in tu102_fifo_recover_work()
129 runm = fifo->recover.runm; in tu102_fifo_recover_work()
130 engm = fifo->recover.engm; in tu102_fifo_recover_work()
131 fifo->recover.engm = 0; in tu102_fifo_recover_work()
132 fifo->recover.runm = 0; in tu102_fifo_recover_work()
133 spin_unlock_irqrestore(&fifo->base.lock, flags); in tu102_fifo_recover_work()
138 if ((engine = fifo->engine[engn].engine)) { in tu102_fifo_recover_work()
145 gk104_fifo_runlist_update(fifo, runl); in tu102_fifo_recover_work()
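
Lines 121-145 outline the recovery worker: it recovers the containing gk104_fifo from the embedded work item with container_of(), snapshots and clears the pending runlist and engine masks under the FIFO lock, then acts on the snapshot, walking the affected engines and committing updated runlists (line 145). The toy below uses simplified stand-in types rather than the real nvkm structures, and the spinlock is only noted in a comment; it illustrates the container_of() plus snapshot-and-clear pattern.

#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct work { int pending; };

struct fifo {
        struct {
                struct work work;            /* embedded work item, as in the driver */
                unsigned long runm, engm;    /* pending runlist/engine masks */
        } recover;
};

static void recover_work(struct work *w)
{
        struct fifo *fifo = container_of(w, struct fifo, recover.work);
        unsigned long runm, engm;

        /* In the driver this snapshot happens under fifo->base.lock with IRQs saved. */
        runm = fifo->recover.runm;
        engm = fifo->recover.engm;
        fifo->recover.runm = 0;
        fifo->recover.engm = 0;

        printf("recovering runlists 0x%lx, engines 0x%lx\n", runm, engm);
}

int main(void)
{
        struct fifo f = { .recover = { .runm = 0x3, .engm = 0x5 } };

        recover_work(&f.recover.work);
        return 0;
}
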
150 static void tu102_fifo_recover_engn(struct gk104_fifo *fifo, int engn);
153 tu102_fifo_recover_runl(struct gk104_fifo *fifo, int runl) in tu102_fifo_recover_runl() argument
155 struct nvkm_subdev *subdev = &fifo->base.engine.subdev; in tu102_fifo_recover_runl()
159 assert_spin_locked(&fifo->base.lock); in tu102_fifo_recover_runl()
160 if (fifo->recover.runm & runm) in tu102_fifo_recover_runl()
162 fifo->recover.runm |= runm; in tu102_fifo_recover_runl()
169 schedule_work(&fifo->recover.work); in tu102_fifo_recover_runl()
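
Lines 153-169 show tu102_fifo_recover_runl() acting as an idempotent "mark once, schedule once" step: if the runlist's bit is already set in fifo->recover.runm nothing further happens, otherwise the bit is recorded and the recovery worker is scheduled. A hedged standalone sketch of that guard, with the lock and the schedule_work() call reduced to comments:

#include <stdbool.h>
#include <stdio.h>

static unsigned long pending_runm;   /* stands in for fifo->recover.runm */

static bool mark_runlist(int runl)
{
        unsigned long runm = 1ul << runl;

        /* The driver asserts fifo->base.lock is held around this test-and-set. */
        if (pending_runm & runm)     /* recovery already queued for this runlist */
                return false;
        pending_runm |= runm;
        return true;                 /* the driver would schedule_work() here */
}

int main(void)
{
        printf("first  call: %d\n", mark_runlist(2));   /* 1: newly queued */
        printf("second call: %d\n", mark_runlist(2));   /* 0: already pending */
        return 0;
}
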
173 tu102_fifo_recover_chid(struct gk104_fifo *fifo, int runl, int chid) in tu102_fifo_recover_chid() argument
178 list_for_each_entry(chan, &fifo->runlist[runl].chan, head) { in tu102_fifo_recover_chid()
185 list_for_each_entry(cgrp, &fifo->runlist[runl].cgrp, head) { in tu102_fifo_recover_chid()
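
Lines 173-185 show the channel lookup walking the runlist's channel list and then its channel-group list for a matching channel ID. The standalone sketch below uses a plain singly-linked list and invented field names in place of the kernel's list_head machinery, and only covers the first of the two walks.

#include <stddef.h>
#include <stdio.h>

struct chan {
        int chid;
        struct chan *next;
};

static struct chan *recover_chid(struct chan *runlist_chan, int chid)
{
        struct chan *chan;

        for (chan = runlist_chan; chan; chan = chan->next) {
                if (chan->chid == chid)
                        return chan;
        }
        return NULL;    /* the driver would also search the cgrp list here */
}

int main(void)
{
        struct chan c1 = { .chid = 7, .next = NULL };
        struct chan c0 = { .chid = 3, .next = &c1 };

        printf("found chid 7: %s\n", recover_chid(&c0, 7) ? "yes" : "no");
        return 0;
}
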
201 struct gk104_fifo *fifo = gk104_fifo(base); in tu102_fifo_recover_chan() local
202 struct nvkm_subdev *subdev = &fifo->base.engine.subdev; in tu102_fifo_recover_chan()
207 unsigned long engn, engm = fifo->runlist[runl].engm; in tu102_fifo_recover_chan()
210 assert_spin_locked(&fifo->base.lock); in tu102_fifo_recover_chan()
215 chan = tu102_fifo_recover_chid(fifo, runl, chid); in tu102_fifo_recover_chan()
218 nvkm_fifo_kevent(&fifo->base, chid); in tu102_fifo_recover_chan()
226 tu102_fifo_recover_runl(fifo, runl); in tu102_fifo_recover_chan()
229 for_each_set_bit(engn, &engm, fifo->engine_nr) { in tu102_fifo_recover_chan()
232 gk104_fifo_engine_status(fifo, engn, &status); in tu102_fifo_recover_chan()
235 tu102_fifo_recover_engn(fifo, engn); in tu102_fifo_recover_chan()
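
Lines 201-235 show tu102_fifo_recover_chan() marking the channel's runlist for recovery and then walking only the engines whose bits are set in the runlist's engine mask, checking each engine's status and recovering the ones involved. In this sketch a plain loop stands in for the kernel's for_each_set_bit(), and the mask and engine count are hypothetical.

#include <stdio.h>

static void recover_engn(int engn)
{
        printf("recovering engine %d\n", engn);
}

int main(void)
{
        unsigned long engm = 0x5;    /* hypothetical mask: engines 0 and 2 */
        int engine_nr = 8;           /* hypothetical engine count */

        for (int engn = 0; engn < engine_nr; engn++) {
                if (engm & (1ul << engn))
                        recover_engn(engn);
        }
        return 0;
}
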
240 tu102_fifo_recover_engn(struct gk104_fifo *fifo, int engn) in tu102_fifo_recover_engn() argument
242 struct nvkm_subdev *subdev = &fifo->base.engine.subdev; in tu102_fifo_recover_engn()
244 const u32 runl = fifo->engine[engn].runl; in tu102_fifo_recover_engn()
248 assert_spin_locked(&fifo->base.lock); in tu102_fifo_recover_engn()
249 if (fifo->recover.engm & engm) in tu102_fifo_recover_engn()
251 fifo->recover.engm |= engm; in tu102_fifo_recover_engn()
254 tu102_fifo_recover_runl(fifo, runl); in tu102_fifo_recover_engn()
257 gk104_fifo_engine_status(fifo, engn, &status); in tu102_fifo_recover_engn()
260 tu102_fifo_recover_chan(&fifo->base, status.chan->id); in tu102_fifo_recover_engn()
268 schedule_work(&fifo->recover.work); in tu102_fifo_recover_engn()
274 struct gk104_fifo *fifo = gk104_fifo(base); in tu102_fifo_fault() local
275 struct nvkm_subdev *subdev = &fifo->base.engine.subdev; in tu102_fifo_fault()
285 er = nvkm_enum_find(fifo->func->fault.reason, info->reason); in tu102_fifo_fault()
286 ee = nvkm_enum_find(fifo->func->fault.engine, info->engine); in tu102_fifo_fault()
288 ec = nvkm_enum_find(fifo->func->fault.hubclient, info->client); in tu102_fifo_fault()
290 ec = nvkm_enum_find(fifo->func->fault.gpcclient, info->client); in tu102_fifo_fault()
293 ea = nvkm_enum_find(fifo->func->fault.access, info->access); in tu102_fifo_fault()
323 spin_lock_irqsave(&fifo->base.lock, flags); in tu102_fifo_fault()
324 chan = nvkm_fifo_chan_inst_locked(&fifo->base, info->inst); in tu102_fifo_fault()
337 tu102_fifo_recover_chan(&fifo->base, chan->chid); in tu102_fifo_fault()
343 for (engn = 0; engn < fifo->engine_nr && engine; engn++) { in tu102_fifo_fault()
344 if (fifo->engine[engn].engine == engine) { in tu102_fifo_fault()
345 tu102_fifo_recover_engn(fifo, engn); in tu102_fifo_fault()
350 spin_unlock_irqrestore(&fifo->base.lock, flags); in tu102_fifo_fault()
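
Lines 274-345 show tu102_fifo_fault() translating the numeric fields of a fault record (reason, engine, client, access) into names via nvkm_enum_find() against the fifo->func->fault tables, then locating the faulting channel by instance under the FIFO lock and recovering it. Below is a standalone sketch of the table-lookup part only; the table entries are invented placeholders, not the real Turing fault reasons.

#include <stddef.h>
#include <stdio.h>

struct str_enum {
        int value;
        const char *name;
};

static const struct str_enum fault_reason[] = {
        { 0x00, "PDE" },     /* placeholder entries */
        { 0x02, "PTE" },
        { -1,   NULL  },     /* sentinel */
};

static const char *enum_find(const struct str_enum *en, int value)
{
        for (; en->name; en++) {
                if (en->value == value)
                        return en->name;
        }
        return "UNKNOWN";
}

int main(void)
{
        printf("fault reason 0x02 -> %s\n", enum_find(fault_reason, 0x02));
        return 0;
}
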
354 tu102_fifo_intr_ctxsw_timeout(struct gk104_fifo *fifo) in tu102_fifo_intr_ctxsw_timeout() argument
356 struct nvkm_device *device = fifo->base.engine.subdev.device; in tu102_fifo_intr_ctxsw_timeout()
360 spin_lock_irqsave(&fifo->base.lock, flags); in tu102_fifo_intr_ctxsw_timeout()
366 tu102_fifo_recover_engn(fifo, engn); in tu102_fifo_intr_ctxsw_timeout()
368 spin_unlock_irqrestore(&fifo->base.lock, flags); in tu102_fifo_intr_ctxsw_timeout()
372 tu102_fifo_intr_sched(struct gk104_fifo *fifo) in tu102_fifo_intr_sched() argument
374 struct nvkm_subdev *subdev = &fifo->base.engine.subdev; in tu102_fifo_intr_sched()
385 struct gk104_fifo *fifo = gk104_fifo(base); in tu102_fifo_intr() local
386 struct nvkm_subdev *subdev = &fifo->base.engine.subdev; in tu102_fifo_intr()
392 gk104_fifo_intr_bind(fifo); in tu102_fifo_intr()
398 tu102_fifo_intr_ctxsw_timeout(fifo); in tu102_fifo_intr()
403 tu102_fifo_intr_sched(fifo); in tu102_fifo_intr()
409 gk104_fifo_intr_chsw(fifo); in tu102_fifo_intr()
420 gk104_fifo_intr_pbdma_0(fifo, unit); in tu102_fifo_intr()
421 gk104_fifo_intr_pbdma_1(fifo, unit); in tu102_fifo_intr()
429 gk104_fifo_intr_runlist(fifo); in tu102_fifo_intr()
435 gk104_fifo_intr_engine(fifo); in tu102_fifo_intr()
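
Lines 385-435 show the top-level interrupt handler reading the pending status and dispatching each known bit (bind, ctxsw timeout, sched, chsw, per-unit PBDMA, runlist, engine) to its own helper. The sketch below only mirrors that dispatch shape; the bit positions and handler names are illustrative, not the real NV_PFIFO register layout.

#include <stdio.h>

#define INTR_SCHED   0x00000100u     /* hypothetical bit assignments */
#define INTR_PBDMA   0x20000000u
#define INTR_RUNLIST 0x40000000u

static void intr_sched(void)   { printf("sched error\n"); }
static void intr_pbdma(void)   { printf("pbdma interrupt\n"); }
static void intr_runlist(void) { printf("runlist update done\n"); }

int main(void)
{
        unsigned int stat = INTR_SCHED | INTR_RUNLIST;  /* pretend MMIO status read */

        if (stat & INTR_SCHED)   { intr_sched();   stat &= ~INTR_SCHED;   }
        if (stat & INTR_PBDMA)   { intr_pbdma();   stat &= ~INTR_PBDMA;   }
        if (stat & INTR_RUNLIST) { intr_runlist(); stat &= ~INTR_RUNLIST; }

        if (stat)
                printf("unhandled interrupt bits: 0x%08x\n", stat);
        return 0;
}
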
468 struct gk104_fifo *fifo; in tu102_fifo_new() local
470 if (!(fifo = kzalloc(sizeof(*fifo), GFP_KERNEL))) in tu102_fifo_new()
472 fifo->func = &tu102_fifo; in tu102_fifo_new()
473 INIT_WORK(&fifo->recover.work, tu102_fifo_recover_work); in tu102_fifo_new()
474 *pfifo = &fifo->base; in tu102_fifo_new()
476 return nvkm_fifo_ctor(&tu102_fifo_, device, type, inst, 4096, &fifo->base); in tu102_fifo_new()
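
Lines 468-476 show the constructor pattern: allocate the Turing-specific object, point it at its function table, initialise the embedded recovery work item, hand the embedded base object back through *pfifo, and finish with the common nvkm_fifo_ctor() call (4096 channels). A simplified standalone sketch of that embed-the-base-object idiom, with stand-in types and names:

#include <stdio.h>
#include <stdlib.h>

struct base_fifo { int nr_channels; };

struct tu102_fifo {
        struct base_fifo base;       /* embedded base object, returned to the caller */
        const char *func;            /* stand-in for the tu102 function table */
};

static int fifo_new(struct base_fifo **pfifo)
{
        struct tu102_fifo *fifo;

        if (!(fifo = calloc(1, sizeof(*fifo))))
                return -1;
        fifo->func = "tu102";        /* the driver stores &tu102_fifo here */
        *pfifo = &fifo->base;        /* callers only ever see the base type */
        fifo->base.nr_channels = 4096;
        return 0;                    /* the driver chains into the common ctor */
}

int main(void)
{
        struct base_fifo *fifo;

        if (!fifo_new(&fifo))
                printf("created fifo with %d channels\n", fifo->nr_channels);
        return 0;
}
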