
Searched refs:chid (Results 1 – 25 of 95) sorted by relevance


/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/core/
ramht.c
27 nvkm_ramht_hash(struct nvkm_ramht *ramht, int chid, u32 handle) in nvkm_ramht_hash() argument
36 hash ^= chid << (ramht->bits - 4); in nvkm_ramht_hash()
41 nvkm_ramht_search(struct nvkm_ramht *ramht, int chid, u32 handle) in nvkm_ramht_search() argument
45 co = ho = nvkm_ramht_hash(ramht, chid, handle); in nvkm_ramht_search()
47 if (ramht->data[co].chid == chid) { in nvkm_ramht_search()
61 int chid, int addr, u32 handle, u32 context) in nvkm_ramht_update() argument
68 data->chid = chid; in nvkm_ramht_update()
75 data->chid = -1; in nvkm_ramht_update()
108 int chid, int addr, u32 handle, u32 context) in nvkm_ramht_insert() argument
112 if (nvkm_ramht_search(ramht, chid, handle)) in nvkm_ramht_insert()
[all …]
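The ramht.c hits show how the channel id is folded into the RAMHT lookup hash before the collision scan in nvkm_ramht_search(). A minimal standalone sketch of that hash, reconstructed from the lines above (only the chid mix-in on line 36 is visible in the results; the handle-folding loop and the final mask are assumptions):

#include <stdint.h>

uint32_t ramht_hash(int bits, int chid, uint32_t handle)
{
	uint32_t hash = 0;

	/* Fold the 32-bit handle down to `bits` bits. */
	while (handle) {
		hash ^= handle & ((1u << bits) - 1);
		handle >>= bits;
	}
	/* Mix the channel id in near the top of the hash (line 36 above). */
	hash ^= (uint32_t)chid << (bits - 4);
	/* Assumed: keep the result inside the table. */
	return hash & ((1u << bits) - 1);
}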
/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/engine/disp/
gp102.c
38 int ctrl = chan->chid.ctrl; in gp102_disp_dmac_init()
39 int user = chan->chid.user; in gp102_disp_dmac_init()
148 gp102_disp_intr_error(struct nvkm_disp *disp, int chid) in gp102_disp_intr_error() argument
152 u32 mthd = nvkm_rd32(device, 0x6111f0 + (chid * 12)); in gp102_disp_intr_error()
153 u32 data = nvkm_rd32(device, 0x6111f4 + (chid * 12)); in gp102_disp_intr_error()
154 u32 unkn = nvkm_rd32(device, 0x6111f8 + (chid * 12)); in gp102_disp_intr_error()
157 chid, (mthd & 0x0000ffc), data, mthd, unkn); in gp102_disp_intr_error()
159 if (chid < ARRAY_SIZE(disp->chan)) { in gp102_disp_intr_error()
162 nv50_disp_chan_mthd(disp->chan[chid], NV_DBG_ERROR); in gp102_disp_intr_error()
169 nvkm_wr32(device, 0x61009c, (1 << chid)); in gp102_disp_intr_error()
[all …]
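gp102.c reads three per-channel error words at a 12-byte stride indexed by chid and acknowledges the interrupt by writing the channel's bit. A standalone sketch of that decode, with rd32()/wr32() as assumed stand-ins for the nvkm MMIO helpers used above:

#include <stdint.h>
#include <stdio.h>

/* Stand-ins for nvkm_rd32()/nvkm_wr32(); the real code goes through the device. */
uint32_t rd32(uint32_t addr) { (void)addr; return 0; }
void wr32(uint32_t addr, uint32_t data) { (void)addr; (void)data; }

void disp_intr_error(int chid)
{
	uint32_t mthd = rd32(0x6111f0 + chid * 12);
	uint32_t data = rd32(0x6111f4 + chid * 12);
	uint32_t unkn = rd32(0x6111f8 + chid * 12);

	printf("chid %d mthd %04x data %08x unknown %08x\n",
	       chid, (unsigned)(mthd & 0x0000ffc), (unsigned)data, (unsigned)unkn);

	/* Acknowledge: one status bit per display channel (line 169 above). */
	wr32(0x61009c, 1u << chid);
}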
gf119.c
485 const u32 mask = 0x00000001 << chan->chid.user; in gf119_disp_chan_intr()
500 int ctrl = chan->chid.ctrl; in gf119_disp_pioc_fini()
501 int user = chan->chid.user; in gf119_disp_pioc_fini()
519 int ctrl = chan->chid.ctrl; in gf119_disp_pioc_init()
520 int user = chan->chid.user; in gf119_disp_pioc_init()
548 return nvkm_ramht_insert(chan->disp->ramht, object, chan->chid.user, -9, handle, in gf119_disp_dmac_bind()
549 chan->chid.user << 27 | 0x00000001); in gf119_disp_dmac_bind()
557 int ctrl = chan->chid.ctrl; in gf119_disp_dmac_fini()
558 int user = chan->chid.user; in gf119_disp_dmac_fini()
579 int ctrl = chan->chid.ctrl; in gf119_disp_dmac_init()
[all …]
gv100.c
316 return 0x690000 + ((chan->chid.user - 1) * 0x1000); in gv100_disp_chan_user()
323 const u32 soff = (chan->chid.ctrl - 1) * 0x04; in gv100_disp_dmac_idle()
336 return nvkm_ramht_insert(chan->disp->ramht, object, chan->chid.user, -9, handle, in gv100_disp_dmac_bind()
337 chan->chid.user << 25 | 0x00000040); in gv100_disp_dmac_bind()
344 const u32 uoff = (chan->chid.ctrl - 1) * 0x1000; in gv100_disp_dmac_fini()
345 const u32 coff = chan->chid.ctrl * 0x04; in gv100_disp_dmac_fini()
357 const u32 uoff = (chan->chid.ctrl - 1) * 0x1000; in gv100_disp_dmac_init()
358 const u32 poff = chan->chid.ctrl * 0x10; in gv100_disp_dmac_init()
359 const u32 coff = chan->chid.ctrl * 0x04; in gv100_disp_dmac_init()
538 const u32 soff = (chan->chid.ctrl - 1) * 0x04; in gv100_disp_curs_idle()
[all …]
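gf119 and gv100 both bind display DMA objects through nvkm_ramht_insert(), but they pack the user channel id into the RAMHT context word at different positions (bit 27 with flag 0x1 on gf119, bit 25 with flag 0x40 on gv100). A sketch of just that packing; the meaning of the flag bits is not spelled out in the results and is left as an opaque constant:

#include <stdint.h>

/* Context word passed to nvkm_ramht_insert() in gf119_disp_dmac_bind() (lines 548-549). */
uint32_t gf119_dmac_context(int chid_user)
{
	return (uint32_t)chid_user << 27 | 0x00000001;
}

/* Same idea in gv100_disp_dmac_bind() (lines 336-337), with the id moved down to bit 25. */
uint32_t gv100_dmac_context(int chid_user)
{
	return (uint32_t)chid_user << 25 | 0x00000040;
}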
nv50.c
468 mthd->name, chan->chid.user); in nv50_disp_chan_mthd()
504 nv50_disp_chan_uevent_send(struct nvkm_disp *disp, int chid) in nv50_disp_chan_uevent_send() argument
506 nvkm_event_send(&disp->uevent, NVKM_DISP_EVENT_CHAN_AWAKEN, chid, NULL, 0); in nv50_disp_chan_uevent_send()
519 return 0x640000 + (chan->chid.user * 0x1000); in nv50_disp_chan_user()
526 const u32 mask = 0x00010001 << chan->chid.user; in nv50_disp_chan_intr()
527 const u32 data = en ? 0x00010000 << chan->chid.user : 0x00000000; in nv50_disp_chan_intr()
537 int ctrl = chan->chid.ctrl; in nv50_disp_pioc_fini()
538 int user = chan->chid.user; in nv50_disp_pioc_fini()
556 int ctrl = chan->chid.ctrl; in nv50_disp_pioc_init()
557 int user = chan->chid.user; in nv50_disp_pioc_init()
[all …]
/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
dmanv40.c
77 int chid; in nv40_fifo_dma_engine_fini() local
85 chid = nvkm_rd32(device, 0x003204) & (fifo->base.nr - 1); in nv40_fifo_dma_engine_fini()
86 if (chid == chan->base.chid) in nv40_fifo_dma_engine_fini()
107 int chid; in nv40_fifo_dma_engine_init() local
116 chid = nvkm_rd32(device, 0x003204) & (fifo->base.nr - 1); in nv40_fifo_dma_engine_init()
117 if (chid == chan->base.chid) in nv40_fifo_dma_engine_init()
156 u32 context = chan->base.chid << 23; in nv40_fifo_dma_object_ctor()
171 hash = nvkm_ramht_insert(imem->ramht, object, chan->base.chid, 4, in nv40_fifo_dma_object_ctor()
229 args->v0.chid = chan->base.chid; in nv40_fifo_dma_new()
230 chan->ramfc = chan->base.chid * 128; in nv40_fifo_dma_new()
dmanv04.c
52 u32 context = 0x80000000 | chan->base.chid << 24; in nv04_fifo_dma_object_ctor()
67 hash = nvkm_ramht_insert(imem->ramht, object, chan->base.chid, 4, in nv04_fifo_dma_object_ctor()
84 u32 chid; in nv04_fifo_dma_fini() local
91 chid = nvkm_rd32(device, NV03_PFIFO_CACHE1_PUSH1) & mask; in nv04_fifo_dma_fini()
92 if (chid == chan->base.chid) { in nv04_fifo_dma_fini()
121 nvkm_mask(device, NV04_PFIFO_MODE, 1 << chan->base.chid, 0); in nv04_fifo_dma_fini()
132 u32 mask = 1 << chan->base.chid; in nv04_fifo_dma_init()
202 args->v0.chid = chan->base.chid; in nv04_fifo_dma_new()
203 chan->ramfc = chan->base.chid * 32; in nv04_fifo_dma_new()
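dmanv40.c and dmanv04.c build the RAMHT context word for a pushbuffer object from the channel id at different shifts before handing it to nvkm_ramht_insert() along with the chid itself. A sketch of the two encodings; only the chid portion appears in the results, so any engine or handle bits ORed in elsewhere are omitted:

#include <stdint.h>

/* NV04: valid bit plus chid in the top byte (dmanv04.c line 52). */
uint32_t nv04_dma_object_context(int chid)
{
	return 0x80000000u | (uint32_t)chid << 24;
}

/* NV40: the chid field moved down to bit 23 (dmanv40.c line 156). */
uint32_t nv40_dma_object_context(int chid)
{
	return (uint32_t)chid << 23;
}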
nv04.c
140 nv04_fifo_swmthd(struct nvkm_device *device, u32 chid, u32 addr, u32 data) in nv04_fifo_swmthd() argument
159 handled = nvkm_sw_mthd(sw, chid, subc, mthd, data); in nv04_fifo_swmthd()
169 nv04_fifo_cache_error(struct nv04_fifo *fifo, u32 chid, u32 get) in nv04_fifo_cache_error() argument
195 !nv04_fifo_swmthd(device, chid, mthd, data)) { in nv04_fifo_cache_error()
196 chan = nvkm_fifo_chan_chid(&fifo->base, chid, &flags); in nv04_fifo_cache_error()
199 chid, chan ? chan->object.client->name : "unknown", in nv04_fifo_cache_error()
220 nv04_fifo_dma_pusher(struct nv04_fifo *fifo, u32 chid) in nv04_fifo_dma_pusher() argument
232 chan = nvkm_fifo_chan_chid(&fifo->base, chid, &flags); in nv04_fifo_dma_pusher()
243 chid, name, ho_get, dma_get, ho_put, dma_put, in nv04_fifo_dma_pusher()
258 chid, name, dma_get, dma_put, state, in nv04_fifo_dma_pusher()
[all …]
tu102.c
173 tu102_fifo_recover_chid(struct gk104_fifo *fifo, int runl, int chid) in tu102_fifo_recover_chid() argument
179 if (chan->base.chid == chid) { in tu102_fifo_recover_chid()
186 if (cgrp->id == chid) { in tu102_fifo_recover_chid()
199 tu102_fifo_recover_chan(struct nvkm_fifo *base, int chid) in tu102_fifo_recover_chan() argument
204 const u32 stat = nvkm_rd32(device, 0x800004 + (chid * 0x08)); in tu102_fifo_recover_chan()
215 chan = tu102_fifo_recover_chid(fifo, runl, chid); in tu102_fifo_recover_chan()
218 nvkm_fifo_kevent(&fifo->base, chid); in tu102_fifo_recover_chan()
222 nvkm_wr32(device, 0x800004 + (chid * 0x08), stat | 0x00000800); in tu102_fifo_recover_chan()
223 nvkm_warn(subdev, "channel %d: killed\n", chid); in tu102_fifo_recover_chan()
233 if (!status.chan || status.chan->id != chid) in tu102_fifo_recover_chan()
[all …]
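tu102.c (and gk104.c further down) recover a faulted channel by reading its status word at 0x800004 + chid * 8 and writing it back with bit 11 set, after which the channel is reported as killed. A sketch of that step, again with assumed rd32()/wr32() stand-ins for the nvkm MMIO helpers:

#include <stdint.h>
#include <stdio.h>

uint32_t rd32(uint32_t addr) { (void)addr; return 0; }
void wr32(uint32_t addr, uint32_t data) { (void)addr; (void)data; }

/* Mark channel `chid` as killed in its per-channel status word. */
void fifo_kill_chan(int chid)
{
	uint32_t stat = rd32(0x800004 + chid * 0x08);

	wr32(0x800004 + chid * 0x08, stat | 0x00000800);
	printf("channel %d: killed\n", chid);
}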
gpfifogk104.c
50 nvkm_wr32(device, 0x002634, chan->base.chid); in gk104_fifo_gpfifo_kick_locked()
57 cgrp ? cgrp->id : chan->base.chid, client->name); in gk104_fifo_gpfifo_kick_locked()
58 nvkm_fifo_recover_chan(&fifo->base, chan->base.chid); in gk104_fifo_gpfifo_kick_locked()
204 u32 coff = chan->base.chid * 8; in gk104_fifo_gpfifo_fini()
223 u32 coff = chan->base.chid * 8; in gk104_fifo_gpfifo_init()
257 gk104_fifo_gpfifo_new_(struct gk104_fifo *fifo, u64 *runlists, u16 *chid, in gk104_fifo_gpfifo_new_() argument
285 *chid = chan->base.chid; in gk104_fifo_gpfifo_new_()
294 chan->cgrp->id = chan->base.chid; in gk104_fifo_gpfifo_new_()
301 usermem = chan->base.chid * 0x200; in gk104_fifo_gpfifo_new_()
324 nvkm_wo32(chan->base.inst, 0xe8, chan->base.chid); in gk104_fifo_gpfifo_new_()
[all …]
channv50.c
90 chan->base.chid, chan->base.object.client->name); in nv50_fifo_chan_engine_fini()
192 u32 chid = chan->base.chid; in nv50_fifo_chan_fini() local
195 nvkm_mask(device, 0x002600 + (chid * 4), 0x80000000, 0x00000000); in nv50_fifo_chan_fini()
197 nvkm_wr32(device, 0x002600 + (chid * 4), 0x00000000); in nv50_fifo_chan_fini()
207 u32 chid = chan->base.chid; in nv50_fifo_chan_init() local
209 nvkm_wr32(device, 0x002600 + (chid * 4), 0x80000000 | addr); in nv50_fifo_chan_init()
gpfifogv100.c
34 return chan->chid; in gv100_fifo_gpfifo_submit_token()
138 struct gk104_fifo *fifo, u64 *runlists, u16 *chid, in gv100_fifo_gpfifo_new_() argument
165 *chid = chan->base.chid; in gv100_fifo_gpfifo_new_()
175 chan->cgrp->id = chan->base.chid; in gv100_fifo_gpfifo_new_()
182 usermem = chan->base.chid * 0x200; in gv100_fifo_gpfifo_new_()
203 nvkm_wo32(chan->base.inst, 0x0e8, chan->base.chid); in gv100_fifo_gpfifo_new_()
230 &args->v0.chid, in gv100_fifo_gpfifo_new()
base.c
37 nvkm_fifo_recover_chan(struct nvkm_fifo *fifo, int chid) in nvkm_fifo_recover_chan() argument
43 fifo->func->recover_chan(fifo, chid); in nvkm_fifo_recover_chan()
105 nvkm_fifo_chan_chid(struct nvkm_fifo *fifo, int chid, unsigned long *rflags) in nvkm_fifo_chan_chid() argument
111 if (chan->chid == chid) { in nvkm_fifo_chan_chid()
123 nvkm_fifo_kevent(struct nvkm_fifo *fifo, int chid) in nvkm_fifo_kevent() argument
125 nvkm_event_send(&fifo->kevent, 1, chid, NULL, 0); in nvkm_fifo_kevent()
136 notify->index = chan->chid; in nvkm_fifo_kevent_ctor()
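base.c resolves a raw chid back to a channel object by walking the fifo's channel list, and raises per-channel events keyed by chid. A minimal sketch of the lookup half, using a plain singly linked list in place of the kernel's list and locking machinery (an assumption made to keep the example standalone):

#include <stddef.h>

struct chan {
	int chid;
	struct chan *next;
};

/* Return the channel whose id matches, or NULL.  The real
 * nvkm_fifo_chan_chid() also takes the fifo lock and returns the saved
 * irq flags to the caller. */
struct chan *chan_by_chid(struct chan *head, int chid)
{
	for (struct chan *chan = head; chan; chan = chan->next) {
		if (chan->chid == chid)
			return chan;
	}
	return NULL;
}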
gf100.c
66 nvkm_wo32(cur, (nr * 8) + 0, chan->base.chid); in gf100_fifo_runlist_commit()
180 u32 chid = chan->base.chid; in gf100_fifo_recover() local
184 engine->subdev.name, chid); in gf100_fifo_recover()
187 nvkm_mask(device, 0x003004 + (chid * 0x08), 0x00000001, 0x00000000); in gf100_fifo_recover()
194 nvkm_fifo_kevent(&fifo->base, chid); in gf100_fifo_recover()
302 info->reason, er ? er->name : "", chan ? chan->chid : -1, in gf100_fifo_fault()
332 u32 chid = (stat & 0x0000007f); in gf100_fifo_intr_sched_ctxsw() local
337 if (chan->base.chid == chid) { in gf100_fifo_intr_sched_ctxsw()
412 u32 chid = nvkm_rd32(device, 0x040120 + (unit * 0x2000)) & 0x7f; in gf100_fifo_intr_pbdma() local
422 if (nvkm_sw_mthd(device->sw, chid, subc, mthd, data)) in gf100_fifo_intr_pbdma()
[all …]
gpfifogf100.c
89 nvkm_wr32(device, 0x002634, chan->base.chid); in gf100_fifo_gpfifo_engine_fini()
91 if (nvkm_rd32(device, 0x002634) == chan->base.chid) in gf100_fifo_gpfifo_engine_fini()
95 chan->base.chid, chan->base.object.client->name); in gf100_fifo_gpfifo_engine_fini()
171 u32 coff = chan->base.chid * 8; in gf100_fifo_gpfifo_fini()
191 u32 coff = chan->base.chid * 8; in gf100_fifo_gpfifo_init()
265 args->v0.chid = chan->base.chid; in gf100_fifo_gpfifo_new()
269 usermem = chan->base.chid * 0x1000; in gf100_fifo_gpfifo_new()
priv.h
10 void nvkm_fifo_kevent(struct nvkm_fifo *, int chid);
11 void nvkm_fifo_recover_chan(struct nvkm_fifo *, int chid);
31 void (*recover_chan)(struct nvkm_fifo *, int chid);
gk104.c
228 nvkm_wo32(memory, offset + 0, chan->base.chid); in gk104_fifo_runlist_chan()
344 gk104_fifo_recover_chid(struct gk104_fifo *fifo, int runl, int chid) in gk104_fifo_recover_chid() argument
350 if (chan->base.chid == chid) { in gk104_fifo_recover_chid()
357 if (cgrp->id == chid) { in gk104_fifo_recover_chid()
370 gk104_fifo_recover_chan(struct nvkm_fifo *base, int chid) in gk104_fifo_recover_chan() argument
375 const u32 stat = nvkm_rd32(device, 0x800004 + (chid * 0x08)); in gk104_fifo_recover_chan()
386 chan = gk104_fifo_recover_chid(fifo, runl, chid); in gk104_fifo_recover_chan()
389 nvkm_fifo_kevent(&fifo->base, chid); in gk104_fifo_recover_chan()
393 nvkm_wr32(device, 0x800004 + (chid * 0x08), stat | 0x00000800); in gk104_fifo_recover_chan()
394 nvkm_warn(subdev, "channel %d: killed\n", chid); in gk104_fifo_recover_chan()
[all …]
dmanv10.c
73 args->v0.chid = chan->base.chid; in nv10_fifo_dma_new()
74 chan->ramfc = chan->base.chid * 32; in nv10_fifo_dma_new()
/Linux-v6.1/drivers/dma/qcom/
gpi.c
91 #define GPII_n_CH_CMD(opcode, chid) \ argument
93 FIELD_PREP(GPII_n_CH_CMD_CHID, chid))
140 #define GPII_n_EV_CMD(opcode, chid) \ argument
142 FIELD_PREP(GPII_n_EV_CMD_CHID, chid))
246 u8 chid; member
257 u8 chid; member
267 u8 chid; member
487 u32 chid; member
689 u32 chid = MAX_CHANNELS_PER_GPII; in gpi_send_cmd() local
697 chid = gchan->chid; in gpi_send_cmd()
[all …]
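gpi.c builds channel and event-ring command register values by packing an opcode and the chid with FIELD_PREP() against GENMASK()-based field definitions. A sketch of the equivalent packing with explicit shifts and masks; the field positions below are assumptions for illustration, since the GPII_n_*_CMD_* masks themselves are not in the results:

#include <stdint.h>

/* Assumed layout for illustration only: opcode in bits 31..24, chid in
 * bits 7..0.  The driver derives both fields from its GENMASK() macros. */
#define CMD_OPCODE_SHIFT	24
#define CMD_CHID_MASK		0xffu

uint32_t gpii_ch_cmd(uint32_t opcode, uint8_t chid)
{
	return opcode << CMD_OPCODE_SHIFT | (chid & CMD_CHID_MASK);
}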
/Linux-v6.1/drivers/gpu/drm/nouveau/nvkm/engine/gr/
nv20.c
24 nvkm_wo32(gr->ctxtab, chan->chid * 4, inst >> 4); in nv20_gr_chan_init()
36 int chid = -1; in nv20_gr_chan_fini() local
40 chid = (nvkm_rd32(device, 0x400148) & 0x1f000000) >> 24; in nv20_gr_chan_fini()
41 if (chan->chid == chid) { in nv20_gr_chan_fini()
54 nvkm_wo32(gr->ctxtab, chan->chid * 4, 0x00000000); in nv20_gr_chan_fini()
86 chan->chid = fifoch->chid; in nv20_gr_chan_new()
96 nvkm_wo32(chan->inst, 0x0000, 0x00000001 | (chan->chid << 24)); in nv20_gr_chan_new()
190 u32 chid = (addr & 0x01f00000) >> 20; in nv20_gr_intr() local
199 chan = nvkm_fifo_chan_chid(device->fifo, chid, &flags); in nv20_gr_intr()
211 show, msg, nsource, src, nstatus, sta, chid, in nv20_gr_intr()
nv10.c
402 int chid; member
552 int chid = nvkm_rd32(device, 0x400148) >> 24; in nv10_gr_channel() local
553 if (chid < ARRAY_SIZE(gr->chan)) in nv10_gr_channel()
554 chan = gr->chan[chid]; in nv10_gr_channel()
812 nv10_gr_load_dma_vtxbuf(struct nv10_gr_chan *chan, int chid, u32 inst) in nv10_gr_load_dma_vtxbuf() argument
861 0x2c000000 | chid << 20 | subchan << 16 | 0x18c); in nv10_gr_load_dma_vtxbuf()
883 nv10_gr_load_context(struct nv10_gr_chan *chan, int chid) in nv10_gr_load_context() argument
901 nv10_gr_load_dma_vtxbuf(chan, chid, inst); in nv10_gr_load_context()
904 nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xff000000, chid << 24); in nv10_gr_load_context()
937 int chid; in nv10_gr_context_switch() local
[all …]
nv04.c
362 int chid; member
1077 int chid = nvkm_rd32(device, NV04_PGRAPH_CTX_USER) >> 24; in nv04_gr_channel() local
1078 if (chid < ARRAY_SIZE(gr->chan)) in nv04_gr_channel()
1079 chan = gr->chan[chid]; in nv04_gr_channel()
1085 nv04_gr_load_context(struct nv04_gr_chan *chan, int chid) in nv04_gr_load_context() argument
1094 nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24); in nv04_gr_load_context()
1119 int chid; in nv04_gr_context_switch() local
1129 chid = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f; in nv04_gr_context_switch()
1130 next = gr->chan[chid]; in nv04_gr_context_switch()
1132 nv04_gr_load_context(next, chid); in nv04_gr_context_switch()
[all …]
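Across nv04.c, nv10.c and nv20.c the graphics engine keeps the active channel id in the top byte of a PGRAPH context register: it is read back with a shift by 24 to find the current owner, and rewritten with chid << 24 on a context switch. A sketch of both directions, using the NV10-era register offset seen above and rd32()/mask32() as assumed MMIO stand-ins (NV04 uses a different register offset):

#include <stdint.h>

uint32_t rd32(uint32_t addr) { (void)addr; return 0; }
void mask32(uint32_t addr, uint32_t mask, uint32_t data)
{
	(void)addr; (void)mask; (void)data;
}

#define PGRAPH_CTX_USER 0x400148	/* NV10 offset from the hits above */

/* Which channel currently owns PGRAPH? (nv10.c line 552) */
int gr_current_chid(void)
{
	return rd32(PGRAPH_CTX_USER) >> 24;
}

/* Hand PGRAPH to channel `chid` by rewriting the top byte (nv10.c line 904). */
void gr_set_current_chid(int chid)
{
	mask32(PGRAPH_CTX_USER, 0xff000000, (uint32_t)chid << 24);
}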
/Linux-v6.1/drivers/bus/mhi/
common.h
126 #define MHI_TRE_CMD_RESET_DWORD1(chid) cpu_to_le32(FIELD_PREP(GENMASK(31, 24), chid) | \ argument
133 #define MHI_TRE_CMD_STOP_DWORD1(chid) cpu_to_le32(FIELD_PREP(GENMASK(31, 24), chid) | \ argument
140 #define MHI_TRE_CMD_START_DWORD1(chid) cpu_to_le32(FIELD_PREP(GENMASK(31, 24), chid) | \ argument
152 #define MHI_TRE_EV_DWORD1(chid, type) cpu_to_le32(FIELD_PREP(GENMASK(31, 24), chid) | \ argument
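The MHI command and event macros in common.h all place the channel id in bits 31..24 of the element's second dword before converting it to little-endian. A sketch of that packing in plain C, leaving out cpu_to_le32() and the per-command type bits that the truncated macros also carry:

#include <stdint.h>

/* chid occupies bits 31..24 of the TRE's second dword; the remaining bits
 * hold the command/event type and are omitted here. */
uint32_t mhi_tre_dword1_chid(uint8_t chid)
{
	return (uint32_t)chid << 24;
}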
/Linux-v6.1/drivers/gpu/drm/nouveau/include/nvkm/core/
ramht.h
9 int chid; member
26 int chid, int addr, u32 handle, u32 context);
29 nvkm_ramht_search(struct nvkm_ramht *, int chid, u32 handle);
/Linux-v6.1/drivers/gpu/drm/nouveau/
nouveau_chan.c
53 NV_PRINTK(warn, cli, "channel %d killed!\n", chan->chid); in nouveau_channel_killed()
76 chan->chid, nvxx_client(&cli->base)->name); in nouveau_channel_idle()
320 chan->chid = args.volta.chid; in nouveau_channel_ind()
325 chan->chid = args.kepler.chid; in nouveau_channel_ind()
329 chan->chid = args.fermi.chid; in nouveau_channel_ind()
331 chan->chid = args.nv50.chid; in nouveau_channel_ind()
371 chan->chid = args.chid; in nouveau_channel_dma()
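nouveau_chan.c copies the channel id out of whichever per-generation argument struct the allocated class uses (volta, kepler, fermi, nv50), or straight from the DMA-channel args. A small sketch of that selection; the struct and enum names here are hypothetical stand-ins for the real nvif argument unions:

/* Hypothetical stand-ins for the versioned nvif channel arguments. */
enum chan_family { FAMILY_NV50, FAMILY_FERMI, FAMILY_KEPLER, FAMILY_VOLTA };

struct chan_args {
	enum chan_family family;
	union {
		struct { int chid; } nv50;
		struct { int chid; } fermi;
		struct { int chid; } kepler;
		struct { int chid; } volta;
	};
};

int chan_args_chid(const struct chan_args *args)
{
	switch (args->family) {
	case FAMILY_VOLTA:  return args->volta.chid;
	case FAMILY_KEPLER: return args->kepler.chid;
	case FAMILY_FERMI:  return args->fermi.chid;
	default:            return args->nv50.chid;
	}
}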
