Lines matching `cs` in drivers/gpu/drm/i915/gvt/mmio_context.c (matched lines grouped by containing function; `...` marks elided source).

 * Copyright(c) 2011-2016 Intel Corporation. All rights reserved.
load_render_mocs():
	struct intel_gvt *gvt = engine->i915->gvt;
	struct intel_uncore *uncore = engine->uncore;
	u32 cnt = gvt->engine_mmio_list.mocs_mmio_offset_list_cnt;
	u32 *regs = gvt->engine_mmio_list.mocs_mmio_offset_list;
	...
	if (!HAS_ENGINE(engine->gt, ring_id))
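These declarations set up the per-engine MOCS save pass. A minimal sketch of the loop they feed, assuming the gen9_render_mocs.control_table save area that switch_mocs() reads later; the loop bounds and local names are reconstructed from context, not quoted from the file:

	i915_reg_t offset;
	u32 ring_id, i;

	for (ring_id = 0; ring_id < cnt; ring_id++) {
		if (!HAS_ENGINE(engine->gt, ring_id))
			continue;
		offset.reg = regs[ring_id];
		/* snapshot GEN9_MOCS_SIZE control words for this engine */
		for (i = 0; i < GEN9_MOCS_SIZE; i++) {
			gen9_render_mocs.control_table[ring_id][i] =
				intel_uncore_read_fw(uncore, offset);
			offset.reg += 4;
		}
	}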
restore_context_mmio_for_inhibit():
	u32 *cs;
	...
	struct intel_gvt *gvt = vgpu->gvt;
	int ring_id = req->engine->id;
	int count = gvt->engine_mmio_list.ctx_mmio_count[ring_id];
	...
	ret = req->engine->emit_flush(req, EMIT_BARRIER);
	...
	cs = intel_ring_begin(req, count * 2 + 2);
	if (IS_ERR(cs))
		return PTR_ERR(cs);

	*cs++ = MI_LOAD_REGISTER_IMM(count);
	for (mmio = gvt->engine_mmio_list.mmio;
	     i915_mmio_reg_valid(mmio->reg); mmio++) {
		if (mmio->id != ring_id || !mmio->in_context)
			continue;

		*cs++ = i915_mmio_reg_offset(mmio->reg);
		*cs++ = vgpu_vreg_t(vgpu, mmio->reg) | (mmio->mask << 16);
		gvt_dbg_core("add lri reg pair 0x%x:0x%x in inhibit ctx, vgpu:%d, ring_id:%d\n",
			     *(cs - 2), *(cs - 1), vgpu->id, ring_id);
	}

	*cs++ = MI_NOOP;
	intel_ring_advance(req, cs);

	ret = req->engine->emit_flush(req, EMIT_BARRIER);
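The block above is the standard LRI emission pattern: reserve count * 2 + 2 dwords, write one MI_LOAD_REGISTER_IMM(count) header, then count (offset, value) pairs, and pad with MI_NOOP so the total dword count stays even. A self-contained sketch of the same pattern; emit_lri_pairs() is a hypothetical helper for illustration, not an i915 API:

	/* hypothetical helper illustrating the LRI batch layout */
	static int emit_lri_pairs(struct i915_request *req,
				  const u32 *offsets, const u32 *values,
				  u32 count)
	{
		u32 *cs, i;

		cs = intel_ring_begin(req, count * 2 + 2);
		if (IS_ERR(cs))
			return PTR_ERR(cs);

		*cs++ = MI_LOAD_REGISTER_IMM(count);	/* header */
		for (i = 0; i < count; i++) {
			*cs++ = offsets[i];	/* register offset */
			*cs++ = values[i];	/* immediate value */
		}
		*cs++ = MI_NOOP;	/* keep the dword count even */
		intel_ring_advance(req, cs);
		return 0;
	}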
restore_render_mocs_control_for_inhibit():
	u32 *cs;
	...
	cs = intel_ring_begin(req, 2 * GEN9_MOCS_SIZE + 2);
	if (IS_ERR(cs))
		return PTR_ERR(cs);

	*cs++ = MI_LOAD_REGISTER_IMM(GEN9_MOCS_SIZE);
	for (index = 0; index < GEN9_MOCS_SIZE; index++) {
		*cs++ = i915_mmio_reg_offset(GEN9_GFX_MOCS(index));
		*cs++ = vgpu_vreg_t(vgpu, GEN9_GFX_MOCS(index));
		gvt_dbg_core("add lri reg pair 0x%x:0x%x in inhibit ctx, vgpu:%d, ring_id:%d\n",
			     *(cs - 2), *(cs - 1), vgpu->id, req->engine->id);
	}

	*cs++ = MI_NOOP;
	intel_ring_advance(req, cs);
restore_render_mocs_l3cc_for_inhibit():
	u32 *cs;
	...
	cs = intel_ring_begin(req, 2 * GEN9_MOCS_SIZE / 2 + 2);
	if (IS_ERR(cs))
		return PTR_ERR(cs);

	*cs++ = MI_LOAD_REGISTER_IMM(GEN9_MOCS_SIZE / 2);
	for (index = 0; index < GEN9_MOCS_SIZE / 2; index++) {
		*cs++ = i915_mmio_reg_offset(GEN9_LNCFCMOCS(index));
		*cs++ = vgpu_vreg_t(vgpu, GEN9_LNCFCMOCS(index));
		gvt_dbg_core("add lri reg pair 0x%x:0x%x in inhibit ctx, vgpu:%d, ring_id:%d\n",
			     *(cs - 2), *(cs - 1), vgpu->id, req->engine->id);
	}

	*cs++ = MI_NOOP;
	intel_ring_advance(req, cs);
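Note the halved count: each GEN9_LNCFCMOCS register packs two 16-bit L3 cache-control entries, so GEN9_MOCS_SIZE / 2 registers cover the whole table. A hedged illustration of that packing (the helper name is hypothetical):

	/* hypothetical: two 16-bit l3cc entries share one 32-bit register */
	static u32 pack_l3cc_pair(u16 low_entry, u16 high_entry)
	{
		return (u32)low_entry | ((u32)high_entry << 16);
	}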
intel_vgpu_restore_inhibit_context():
	u32 *cs;
	...
	cs = intel_ring_begin(req, 2);
	if (IS_ERR(cs))
		return PTR_ERR(cs);

	*cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE;
	*cs++ = MI_NOOP;
	intel_ring_advance(req, cs);
	...
	/* no MOCS registers in context except on the render engine */
	if (req->engine->id != RCS0)
	...
	cs = intel_ring_begin(req, 2);
	if (IS_ERR(cs))
		return PTR_ERR(cs);

	*cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE;
	*cs++ = MI_NOOP;
	intel_ring_advance(req, cs);
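Taken together, the function brackets the register restore in an arbitration-off window so the LRI batches cannot be preempted halfway. A hedged outline of the control flow (the function name is hypothetical; error paths and the ring emission shown above are elided):

	static int restore_inhibit_outline(struct intel_vgpu *vgpu,
					   struct i915_request *req)
	{
		int ret;

		/* MI_ARB_DISABLE emitted here (see above) */
		ret = restore_context_mmio_for_inhibit(vgpu, req);
		if (ret)
			return ret;

		/* MOCS lives in the render engine's context image only */
		if (req->engine->id == RCS0) {
			ret = restore_render_mocs_control_for_inhibit(vgpu, req);
			if (ret)
				return ret;
			ret = restore_render_mocs_l3cc_for_inhibit(vgpu, req);
			if (ret)
				return ret;
		}

		/* MI_ARB_ENABLE emitted here (see above) */
		return 0;
	}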
handle_tlb_pending_event():
	struct intel_uncore *uncore = engine->uncore;
	struct intel_vgpu_submission *s = &vgpu->submission;
	u32 *regs = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list;
	u32 cnt = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list_cnt;
	...
	if (drm_WARN_ON(&engine->i915->drm, engine->id >= cnt))
		return;

	if (!test_and_clear_bit(engine->id, (void *)s->tlb_handle_pending))
		return;

	reg = _MMIO(regs[engine->id]);
	...
	/* hold render forcewake while invalidating the RCS TLB on Gen9+ */
	if (engine->id == RCS0 && GRAPHICS_VER(engine->i915) >= 9)
	...
	gvt_vgpu_err("timeout in invalidate ring %s tlb\n",
		     engine->name);
	...
	gvt_dbg_core("invalidate TLB for ring %s\n", engine->name);
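The register pulled from tlb_mmio_offset_list is write-1-to-invalidate: the handler writes 0x1 and polls until the hardware clears the bit. A minimal sketch of that sequence, assuming i915's wait_for_atomic() polling helper and a 50 ms budget inferred from the timeout message above:

	intel_uncore_write_fw(uncore, reg, 0x1);

	/* hardware clears the bit once the TLB invalidation completes */
	if (wait_for_atomic(intel_uncore_read_fw(uncore, reg) == 0, 50))
		gvt_vgpu_err("timeout in invalidate ring %s tlb\n",
			     engine->name);
	else
		vgpu_vreg_t(vgpu, reg) = 0;	/* acknowledge in the vreg */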
switch_mocs():
	struct intel_uncore *uncore = engine->uncore;
	...
	if (drm_WARN_ON(&engine->i915->drm, engine->id >= ARRAY_SIZE(regs)))
		return;

	/* Gen9 render MOCS is restored via the inhibit-context LRI instead */
	if (engine->id == RCS0 && GRAPHICS_VER(engine->i915) == 9)
		return;
	...
	offset.reg = regs[engine->id];
	...
		old_v = gen9_render_mocs.control_table[engine->id][i];
	...
		new_v = gen9_render_mocs.control_table[engine->id][i];
	...
	if (engine->id == RCS0) {
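old_v is the value currently programmed (the outgoing vGPU's virtual register, or the table saved by load_render_mocs() when leaving host mode), new_v the value wanted by the incoming side, and hardware is written only when they differ. A hedged reconstruction of the control-table loop:

	for (i = 0; i < GEN9_MOCS_SIZE; i++) {
		if (pre)	/* switching away from a vGPU */
			old_v = vgpu_vreg_t(pre, offset);
		else		/* switching away from host */
			old_v = gen9_render_mocs.control_table[engine->id][i];
		if (next)	/* switching to a vGPU */
			new_v = vgpu_vreg_t(next, offset);
		else		/* switching back to host */
			new_v = gen9_render_mocs.control_table[engine->id][i];

		if (old_v != new_v)
			intel_uncore_write_fw(uncore, offset, new_v);

		offset.reg += 4;
	}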
is_inhibit_context():
	const u32 *reg_state = ce->lrc_reg_state;
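The predicate tests the masked CTX_RESTORE_INHIBIT bit in the context-control dword of the logical ring context image. A sketch of the full check, assuming i915's CTX_CONTEXT_CONTROL index and CTX_CTRL_ENGINE_CTX_RESTORE_INHIBIT bit definitions:

	bool is_inhibit_context(struct intel_context *ce)
	{
		const u32 *reg_state = ce->lrc_reg_state;
		u32 inhibit_mask =
			_MASKED_BIT_ENABLE(CTX_CTRL_ENGINE_CTX_RESTORE_INHIBIT);

		/* both the mask and the enable bit must be set */
		return inhibit_mask ==
			(reg_state[CTX_CONTEXT_CONTROL] & inhibit_mask);
	}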
switch_mmio():
	struct intel_uncore *uncore = engine->uncore;
	...
	if (GRAPHICS_VER(engine->i915) >= 9)
		switch_mocs(pre, next, engine);

	for (mmio = engine->i915->gvt->engine_mmio_list.mmio;
	     i915_mmio_reg_valid(mmio->reg); mmio++) {
		if (mmio->id != engine->id)
			continue;
		/* on Gen9, in-context registers are handled by the MOCS/LRI path */
		if (GRAPHICS_VER(engine->i915) == 9 && mmio->in_context)
			continue;

		/* save: pre is the vGPU being switched out, NULL for host */
		if (pre) {
			vgpu_vreg_t(pre, mmio->reg) =
				intel_uncore_read_fw(uncore, mmio->reg);
			if (mmio->mask)
				vgpu_vreg_t(pre, mmio->reg) &=
					~(mmio->mask << 16);
			old_v = vgpu_vreg_t(pre, mmio->reg);
		} else {
			old_v = mmio->value =
				intel_uncore_read_fw(uncore, mmio->reg);
		}

		/* restore: next is the vGPU being switched in, NULL for host */
		if (next) {
			s = &next->submission;
			/* an in-context mmio restores itself unless inhibited */
			if (mmio->in_context &&
			    !is_inhibit_context(s->shadow[engine->id]))
				continue;

			if (mmio->mask)
				new_v = vgpu_vreg_t(next, mmio->reg) |
					(mmio->mask << 16);
			else
				new_v = vgpu_vreg_t(next, mmio->reg);
		} else {
			if (mmio->in_context)
				continue;
			if (mmio->mask)
				new_v = mmio->value | (mmio->mask << 16);
			else
				new_v = mmio->value;
		}

		intel_uncore_write_fw(uncore, mmio->reg, new_v);

		trace_render_mmio(pre ? pre->id : 0,
				  next ? next->id : 0,
				  ...
				  i915_mmio_reg_offset(mmio->reg),
				  ...);
intel_gvt_switch_mmio() - switch mmio context of specific engine:
	if (WARN(!pre && !next, "switch ring %s from host to HOST\n",
		 engine->name))
		return;

	gvt_dbg_render("switch ring %s from %s to %s\n", engine->name,
		       pre ? "vGPU" : "host", next ? "vGPU" : "host");
	...
	/* raw *_fw accessors are used below, so handle forcewake manually */
	intel_uncore_forcewake_get(engine->uncore, FORCEWAKE_ALL);
	...
	intel_uncore_forcewake_put(engine->uncore, FORCEWAKE_ALL);
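The forcewake bracket keeps every power well awake while the raw *_fw accessors in switch_mmio() run, so the batched register reads and writes never race RC6 entry. A hedged reconstruction of the bracket (the placement of the switch_mmio() call between get and put is inferred from context):

	intel_uncore_forcewake_get(engine->uncore, FORCEWAKE_ALL);
	switch_mmio(pre, next, engine);	/* do the actual save/restore */
	intel_uncore_forcewake_put(engine->uncore, FORCEWAKE_ALL);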
intel_gvt_init_engine_mmio_context() - initialize the engine mmio list:
	if (GRAPHICS_VER(gvt->gt->i915) >= 9) {
		gvt->engine_mmio_list.mmio = gen9_engine_mmio_list;
		gvt->engine_mmio_list.tlb_mmio_offset_list = gen8_tlb_mmio_offset_list;
		gvt->engine_mmio_list.tlb_mmio_offset_list_cnt = ARRAY_SIZE(gen8_tlb_mmio_offset_list);
		gvt->engine_mmio_list.mocs_mmio_offset_list = gen9_mocs_mmio_offset_list;
		gvt->engine_mmio_list.mocs_mmio_offset_list_cnt = ARRAY_SIZE(gen9_mocs_mmio_offset_list);
	} else {
		gvt->engine_mmio_list.mmio = gen8_engine_mmio_list;
		gvt->engine_mmio_list.tlb_mmio_offset_list = gen8_tlb_mmio_offset_list;
		gvt->engine_mmio_list.tlb_mmio_offset_list_cnt = ARRAY_SIZE(gen8_tlb_mmio_offset_list);
	}

	for (mmio = gvt->engine_mmio_list.mmio;
	     i915_mmio_reg_valid(mmio->reg); mmio++) {
		if (mmio->in_context) {
			gvt->engine_mmio_list.ctx_mmio_count[mmio->id]++;
			intel_gvt_mmio_set_sr_in_ctx(gvt, mmio->reg.reg);
		}
	}
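The per-engine tally built here is what restore_context_mmio_for_inhibit() later reads back as ctx_mmio_count[ring_id] to size its MI_LOAD_REGISTER_IMM batch, so every entry flagged in_context costs exactly one (offset, value) pair in the inhibit-context restore.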