Lines Matching refs:falcon
31 struct nvkm_falcon *falcon = nvkm_falcon(oclass->engine); in nvkm_falcon_oclass_get() local
34 while (falcon->func->sclass[c].oclass) { in nvkm_falcon_oclass_get()
36 oclass->base = falcon->func->sclass[index]; in nvkm_falcon_oclass_get()
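The lookup above walks a zero-terminated sclass table on the per-implementation func structure until it reaches the requested index. A minimal standalone sketch of that pattern, using simplified stand-in types and placeholder class IDs rather than the real nvkm structures:

#include <stdio.h>

/* Simplified stand-in for the nvkm sclass tables (placeholder class IDs). */
struct sclass { unsigned int oclass; };

static const struct sclass sclass_table[] = {
	{ 0x1111 },
	{ 0x2222 },
	{ 0 },			/* a zero oclass terminates the table */
};

/* Return the index and store the matching entry, or return the total
 * number of classes when the index is out of range, mirroring the loop
 * in the excerpt above. */
static int oclass_get(const struct sclass *table, int index,
		      const struct sclass **base)
{
	int c = 0;

	while (table[c].oclass) {
		if (c++ == index) {
			*base = &table[index];
			return index;
		}
	}
	return c;
}

int main(void)
{
	const struct sclass *base = NULL;
	int ret = oclass_get(sclass_table, 1, &base);

	if (base)
		printf("class[%d] = 0x%04x\n", ret, base->oclass);
	return 0;
}

Returning the class count when the index is out of range lets callers enumerate the table by probing increasing indices until the lookup fails.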
60 struct nvkm_falcon *falcon = nvkm_falcon(engine); in nvkm_falcon_intr() local
61 struct nvkm_subdev *subdev = &falcon->engine.subdev; in nvkm_falcon_intr()
63 const u32 base = falcon->addr; in nvkm_falcon_intr()
73 if (falcon->func->intr) { in nvkm_falcon_intr()
74 falcon->func->intr(falcon, chan); in nvkm_falcon_intr()
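The interrupt excerpt ends by handing off to an optional per-implementation ->intr() callback. A compact sketch of that hook pattern, with stand-in types and a hypothetical example_intr handler; the shared register decode and channel lookup that precede the hook in the real handler are omitted:

#include <stdio.h>

struct falcon;

/* Per-implementation hooks; only ->intr is shown here. */
struct falcon_func {
	void (*intr)(struct falcon *, int chid);
};

struct falcon {
	const struct falcon_func *func;
};

/* Hypothetical engine-specific handler. */
static void example_intr(struct falcon *falcon, int chid)
{
	printf("engine-specific interrupt on channel %d\n", chid);
}

/* Common path: shared decode would happen first, then the optional hook. */
static void falcon_intr(struct falcon *falcon, int chid)
{
	if (falcon->func->intr)
		falcon->func->intr(falcon, chid);
}

int main(void)
{
	static const struct falcon_func func = { .intr = example_intr };
	struct falcon falcon = { .func = &func };

	falcon_intr(&falcon, 3);
	return 0;
}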
97 struct nvkm_falcon *falcon = nvkm_falcon(engine); in nvkm_falcon_fini() local
98 struct nvkm_device *device = falcon->engine.subdev.device; in nvkm_falcon_fini()
99 const u32 base = falcon->addr; in nvkm_falcon_fini()
102 nvkm_memory_unref(&falcon->core); in nvkm_falcon_fini()
103 if (falcon->external) { in nvkm_falcon_fini()
104 vfree(falcon->data.data); in nvkm_falcon_fini()
105 vfree(falcon->code.data); in nvkm_falcon_fini()
106 falcon->code.data = NULL; in nvkm_falcon_fini()
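The fini path drops the boot-core memory object and, only when the microcode was loaded from an external firmware file, frees the duplicated images and clears the code pointer so the next init reloads them. A userspace sketch of that conditional teardown, with malloc/free standing in for the kernel allocators and the memory-object release elided:

#include <stdbool.h>
#include <stdlib.h>

struct blob { void *data; size_t size; };

struct falcon {
	bool external;		/* microcode came from an external file */
	struct blob code, data;
};

static void falcon_fini(struct falcon *falcon)
{
	/* nvkm_memory_unref(&falcon->core) drops the boot image here */
	if (falcon->external) {
		free(falcon->data.data);
		free(falcon->code.data);
		falcon->code.data = NULL;	/* force a reload on next init */
	}
}

int main(void)
{
	struct falcon falcon = {
		.external = true,
		.code = { .data = malloc(16), .size = 16 },
		.data = { .data = malloc(16), .size = 16 },
	};

	falcon_fini(&falcon);
	return 0;
}

Built-in (non-external) microcode is left untouched, since it belongs to the per-implementation func structure rather than to this instance.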
128 struct nvkm_falcon *falcon = nvkm_falcon(engine); in nvkm_falcon_oneinit() local
129 struct nvkm_subdev *subdev = &falcon->engine.subdev; in nvkm_falcon_oneinit()
131 const u32 base = falcon->addr; in nvkm_falcon_oneinit()
137 falcon->version = 0; in nvkm_falcon_oneinit()
138 falcon->secret = (falcon->addr == 0x087000) ? 1 : 0; in nvkm_falcon_oneinit()
141 falcon->version = (caps & 0x0000000f); in nvkm_falcon_oneinit()
142 falcon->secret = (caps & 0x00000030) >> 4; in nvkm_falcon_oneinit()
146 falcon->code.limit = (caps & 0x000001ff) << 8; in nvkm_falcon_oneinit()
147 falcon->data.limit = (caps & 0x0003fe00) >> 1; in nvkm_falcon_oneinit()
149 nvkm_debug(subdev, "falcon version: %d\n", falcon->version); in nvkm_falcon_oneinit()
150 nvkm_debug(subdev, "secret level: %d\n", falcon->secret); in nvkm_falcon_oneinit()
151 nvkm_debug(subdev, "code limit: %d\n", falcon->code.limit); in nvkm_falcon_oneinit()
152 nvkm_debug(subdev, "data limit: %d\n", falcon->data.limit); in nvkm_falcon_oneinit()
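The oneinit excerpt decodes two capability words: one carries the falcon version and secret (authentication) level, the other the code and data segment limits, stored as 9-bit fields in 256-byte units (hence the shifts). Older falcons without the capability word fall back to version 0 and infer the secret level from the unit's address (0x087000). A standalone sketch of just the bit decoding; the register offsets the words are read from are not part of the excerpt and are left out here:

#include <stdio.h>
#include <stdint.h>

struct limits { uint32_t code, data; };

/* Decode version/secret from one capability word and the segment limits
 * from the other, mirroring the masks and shifts in the listing. */
static void decode_caps(uint32_t caps0, uint32_t caps1,
			unsigned *version, unsigned *secret,
			struct limits *lim)
{
	*version = caps0 & 0x0000000f;
	*secret  = (caps0 & 0x00000030) >> 4;

	lim->code = (caps1 & 0x000001ff) << 8;	/* 9-bit field, 256-byte units */
	lim->data = (caps1 & 0x0003fe00) >> 1;	/* same units, field at bit 9 */
}

int main(void)
{
	unsigned version, secret;
	struct limits lim;

	decode_caps(0x00000013, 0x00014040, &version, &secret, &lim);
	printf("version %u secret %u code limit %u data limit %u\n",
	       version, secret, lim.code, lim.data);
	return 0;
}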
159 struct nvkm_falcon *falcon = nvkm_falcon(engine); in nvkm_falcon_init() local
160 struct nvkm_subdev *subdev = &falcon->engine.subdev; in nvkm_falcon_init()
164 const u32 base = falcon->addr; in nvkm_falcon_init()
168 if (falcon->secret && falcon->version < 4) { in nvkm_falcon_init()
169 if (!falcon->version) { in nvkm_falcon_init()
189 if (!falcon->code.data) { in nvkm_falcon_init()
191 device->chipset, falcon->addr >> 12); in nvkm_falcon_init()
195 falcon->code.data = vmemdup(fw->data, fw->size); in nvkm_falcon_init()
196 falcon->code.size = fw->size; in nvkm_falcon_init()
197 falcon->data.data = NULL; in nvkm_falcon_init()
198 falcon->data.size = 0; in nvkm_falcon_init()
202 falcon->external = true; in nvkm_falcon_init()
208 if (!falcon->code.data) { in nvkm_falcon_init()
210 device->chipset, falcon->addr >> 12); in nvkm_falcon_init()
218 falcon->data.data = vmemdup(fw->data, fw->size); in nvkm_falcon_init()
219 falcon->data.size = fw->size; in nvkm_falcon_init()
221 if (!falcon->data.data) in nvkm_falcon_init()
225 device->chipset, falcon->addr >> 12); in nvkm_falcon_init()
233 falcon->code.data = vmemdup(fw->data, fw->size); in nvkm_falcon_init()
234 falcon->code.size = fw->size; in nvkm_falcon_init()
236 if (!falcon->code.data) in nvkm_falcon_init()
240 nvkm_debug(subdev, "firmware: %s (%s)\n", name, falcon->data.data ? in nvkm_falcon_init()
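When no built-in image is present, the init path requests external firmware named from the chipset and the unit's MMIO base shifted down by 12 bits, duplicates the payload, and marks the falcon as using external microcode; some units ship a single combined image, others separate code and data images. A sketch of that naming and duplication step; the exact format string is an assumption, and memdup() is a userspace stand-in for the driver's vmemdup() helper:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>

/* Stand-in for the loaded firmware payload. */
struct firmware { const void *data; size_t size; };

/* Duplicate a firmware payload, as the vmemdup() calls in the listing do. */
static void *memdup(const void *src, size_t len)
{
	void *dst = malloc(len);

	if (dst)
		memcpy(dst, src, len);
	return dst;
}

int main(void)
{
	char name[32];
	unsigned chipset = 0x98;	/* placeholder chipset */
	uint32_t addr = 0x087000;	/* placeholder falcon MMIO base */

	/* Assumed naming scheme: chipset plus base >> 12, with a 'c'/'d'
	 * suffix distinguishing separate code and data images. */
	snprintf(name, sizeof(name), "nv%02x_fuc%03xc",
		 chipset, (unsigned)(addr >> 12));
	printf("would request firmware \"%s\"\n", name);

	const uint8_t payload[] = { 0xde, 0xad, 0xbe, 0xef };
	const struct firmware fw = { payload, sizeof(payload) };
	void *code = memdup(fw.data, fw.size);

	free(code);
	return 0;
}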
244 if (!falcon->data.data && !falcon->core) { in nvkm_falcon_init()
246 falcon->code.size, 256, false, in nvkm_falcon_init()
247 &falcon->core); in nvkm_falcon_init()
253 nvkm_kmap(falcon->core); in nvkm_falcon_init()
254 for (i = 0; i < falcon->code.size; i += 4) in nvkm_falcon_init()
255 nvkm_wo32(falcon->core, i, falcon->code.data[i / 4]); in nvkm_falcon_init()
256 nvkm_done(falcon->core); in nvkm_falcon_init()
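When there is no separate data image, the code image is copied word by word into a freshly allocated, 256-byte-aligned memory object between the kmap/done calls. A small sketch of that copy, where the byte offset advances by 4 while the source image is indexed in 32-bit words; memobj and wo32() are simplified stand-ins for the nvkm memory accessors:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Stand-in for a byte-addressed memory object with a 32-bit write accessor. */
struct memobj { uint8_t *base; size_t size; };

static void wo32(struct memobj *mem, uint32_t offset, uint32_t val)
{
	memcpy(mem->base + offset, &val, sizeof(val));
}

/* Copy a u32 microcode image into the object: byte offsets on one side,
 * word indices on the other, as in the loop above. */
static void copy_code(struct memobj *mem, const uint32_t *code, size_t size)
{
	for (size_t i = 0; i < size; i += 4)
		wo32(mem, i, code[i / 4]);
}

int main(void)
{
	uint8_t backing[16] = { 0 };
	struct memobj mem = { backing, sizeof(backing) };
	const uint32_t code[] = { 0x11111111, 0x22222222, 0x33333333 };
	uint32_t first;

	copy_code(&mem, code, sizeof(code));
	memcpy(&first, backing, sizeof(first));
	printf("first word: 0x%08x\n", first);
	return 0;
}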
260 if (falcon->core) { in nvkm_falcon_init()
261 u64 addr = nvkm_memory_addr(falcon->core); in nvkm_falcon_init()
271 if (falcon->code.size > falcon->code.limit || in nvkm_falcon_init()
272 falcon->data.size > falcon->data.limit) { in nvkm_falcon_init()
277 if (falcon->version < 3) { in nvkm_falcon_init()
279 for (i = 0; i < falcon->code.size / 4; i++) in nvkm_falcon_init()
280 nvkm_wr32(device, base + 0xff4, falcon->code.data[i]); in nvkm_falcon_init()
283 for (i = 0; i < falcon->code.size / 4; i++) { in nvkm_falcon_init()
286 nvkm_wr32(device, base + 0x184, falcon->code.data[i]); in nvkm_falcon_init()
292 if (falcon->version < 3) { in nvkm_falcon_init()
294 for (i = 0; !falcon->core && i < falcon->data.size / 4; i++) in nvkm_falcon_init()
295 nvkm_wr32(device, base + 0xff4, falcon->data.data[i]); in nvkm_falcon_init()
296 for (; i < falcon->data.limit; i += 4) in nvkm_falcon_init()
300 for (i = 0; !falcon->core && i < falcon->data.size / 4; i++) in nvkm_falcon_init()
301 nvkm_wr32(device, base + 0x1c4, falcon->data.data[i]); in nvkm_falcon_init()
302 for (; i < falcon->data.limit / 4; i++) in nvkm_falcon_init()
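Both upload paths stream the image through a single 32-bit port, one word per write: older falcons (version < 3) use base + 0xff4 for code and data, newer ones use base + 0x184 for code and base + 0x1c4 for data, and the data segment is then filled up to its limit (the fill value is assumed to be zero; it is not visible in the excerpts). When the code was placed in the core memory object, the data loops are skipped entirely, as the !falcon->core condition shows. A generic sketch of the stream-and-pad pattern with a stand-in wr32():

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the MMIO write accessor (nvkm_wr32 in the real driver). */
static void wr32(uint32_t reg, uint32_t val)
{
	printf("wr32(0x%06x, 0x%08x)\n", (unsigned)reg, (unsigned)val);
}

/* Stream an image through an upload port one 32-bit word at a time, then
 * zero-fill up to the segment limit. The port offset is supplied by the
 * caller; the zero padding value is an assumption. */
static void upload(uint32_t port, const uint32_t *image, size_t size,
		   size_t limit)
{
	size_t i;

	for (i = 0; i < size / 4; i++)
		wr32(port, image[i]);
	for (; i < limit / 4; i++)
		wr32(port, 0x00000000);
}

int main(void)
{
	const uint32_t data[] = { 0xcafe0000, 0xcafe0001 };

	upload(0x087000 + 0x1c4, data, sizeof(data), 16);
	return 0;
}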
312 if (falcon->func->init) in nvkm_falcon_init()
313 falcon->func->init(falcon); in nvkm_falcon_init()
339 struct nvkm_falcon *falcon; in nvkm_falcon_new_() local
341 if (!(falcon = kzalloc(sizeof(*falcon), GFP_KERNEL))) in nvkm_falcon_new_()
343 falcon->func = func; in nvkm_falcon_new_()
344 falcon->addr = addr; in nvkm_falcon_new_()
345 falcon->code.data = func->code.data; in nvkm_falcon_new_()
346 falcon->code.size = func->code.size; in nvkm_falcon_new_()
347 falcon->data.data = func->data.data; in nvkm_falcon_new_()
348 falcon->data.size = func->data.size; in nvkm_falcon_new_()
349 *pengine = &falcon->engine; in nvkm_falcon_new_()
352 enable, &falcon->engine); in nvkm_falcon_new_()
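The constructor allocates the falcon with the generic engine embedded in it, records the per-implementation func and MMIO base, seeds the code and data images from any built-in microcode on func, and hands the embedded engine back to the caller before the base engine constructor runs. A simplified sketch of that embedded-object pattern; the types are stand-ins and the nvkm_engine_ctor() step is reduced to a comment:

#include <errno.h>
#include <stdint.h>
#include <stdlib.h>

struct engine { int id; };
struct blob { const void *data; size_t size; };

/* Per-implementation description, including optional built-in microcode. */
struct falcon_func {
	struct blob code, data;
};

/* The generic engine object is embedded in the falcon; callers only ever
 * see a pointer to the embedded member. */
struct falcon {
	const struct falcon_func *func;
	uint32_t addr;
	struct blob code, data;
	struct engine engine;
};

static int falcon_new(const struct falcon_func *func, uint32_t addr,
		      struct engine **pengine)
{
	struct falcon *falcon;

	if (!(falcon = calloc(1, sizeof(*falcon))))
		return -ENOMEM;
	falcon->func = func;
	falcon->addr = addr;
	falcon->code = func->code;	/* seed from built-in image, if any */
	falcon->data = func->data;
	*pengine = &falcon->engine;
	return 0;	/* the excerpt then passes &falcon->engine to the
			 * base engine constructor */
}

int main(void)
{
	static const struct falcon_func func;	/* no built-in microcode */
	struct engine *engine;

	return falcon_new(&func, 0x087000, &engine) ? 1 : 0;
}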