Lines Matching full:so
38 * right after the commands taking care of alignment so we should have sufficient
48 static int render_state_setup(struct intel_renderstate *so, in render_state_setup() argument
51 const struct intel_renderstate_rodata *rodata = so->rodata; in render_state_setup()
56 d = i915_gem_object_pin_map(so->vma->obj, I915_MAP_WB); in render_state_setup()
64 u64 r = s + so->vma->node.start; in render_state_setup()
87 so->batch_offset = i915_ggtt_offset(so->vma); in render_state_setup()
88 so->batch_size = rodata->batch_items * sizeof(u32); in render_state_setup()
93 so->aux_offset = i * sizeof(u32); in render_state_setup()
123 so->aux_size = i * sizeof(u32) - so->aux_offset; in render_state_setup()
124 so->aux_offset += so->batch_offset; in render_state_setup()
129 so->aux_size = ALIGN(so->aux_size, 8); in render_state_setup()
133 __i915_gem_object_flush_map(so->vma->obj, 0, i * sizeof(u32)); in render_state_setup()
134 __i915_gem_object_release_map(so->vma->obj); in render_state_setup()
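
Read in order, the render_state_setup() matches above describe a single pass over the golden-state rodata: the batch object is pinned into a write-back CPU map, each relocation slot is patched with the batch's own GGTT address, the batch and auxiliary offsets/sizes are recorded, and the map is flushed and released. A minimal sketch of that flow, reconstructed only from the matched lines; the reloc[] termination, the 64-bit relocation path and the auxiliary command emission are elided or assumed, and the headers are the driver's own i915 internals:

    static int render_state_setup(struct intel_renderstate *so,
                                  struct drm_i915_private *i915)
    {
        const struct intel_renderstate_rodata *rodata = so->rodata;
        unsigned int i = 0, reloc_index = 0;
        u32 *d;

        /* CPU-visible, write-back mapping of the batch object. */
        d = i915_gem_object_pin_map(so->vma->obj, I915_MAP_WB);
        if (IS_ERR(d))
            return PTR_ERR(d);

        while (i < rodata->batch_items) {
            u32 s = rodata->batch[i];

            /* reloc[] is assumed to hold dword offsets into the batch,
             * ending in a sentinel that never matches. */
            if (i * 4 == rodata->reloc[reloc_index]) {
                u64 r = s + so->vma->node.start; /* patch in GGTT address */

                s = lower_32_bits(r);
                /* Platforms with 64-bit relocations also write
                 * upper_32_bits(r) into the next dword (elided);
                 * i915 is what selects that path in the full version. */
                reloc_index++;
            }

            d[i++] = s;
        }

        so->batch_offset = i915_ggtt_offset(so->vma);
        so->batch_size = rodata->batch_items * sizeof(u32);

        /* Auxiliary commands are appended after the batch; the matches
         * only record how their offset/size follow the write cursor i. */
        so->aux_offset = i * sizeof(u32);
        /* ... emit auxiliary commands, advancing i ... */
        so->aux_size = i * sizeof(u32) - so->aux_offset;
        so->aux_offset += so->batch_offset;
        so->aux_size = ALIGN(so->aux_size, 8);

        __i915_gem_object_flush_map(so->vma->obj, 0, i * sizeof(u32));
        __i915_gem_object_release_map(so->vma->obj);
        return 0;
    }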
140 int intel_renderstate_init(struct intel_renderstate *so, in intel_renderstate_init() argument
147 memset(so, 0, sizeof(*so)); in intel_renderstate_init()
149 so->rodata = render_state_get_rodata(engine); in intel_renderstate_init()
150 if (so->rodata) { in intel_renderstate_init()
151 if (so->rodata->batch_items * 4 > PAGE_SIZE) in intel_renderstate_init()
158 so->vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL); in intel_renderstate_init()
159 if (IS_ERR(so->vma)) { in intel_renderstate_init()
160 err = PTR_ERR(so->vma); in intel_renderstate_init()
165 i915_gem_ww_ctx_init(&so->ww, true); in intel_renderstate_init()
167 err = intel_context_pin_ww(ce, &so->ww); in intel_renderstate_init()
172 if (!err && !so->rodata) in intel_renderstate_init()
175 err = i915_gem_object_lock(so->vma->obj, &so->ww); in intel_renderstate_init()
179 err = i915_vma_pin_ww(so->vma, &so->ww, 0, 0, PIN_GLOBAL | PIN_HIGH); in intel_renderstate_init()
183 err = render_state_setup(so, engine->i915); in intel_renderstate_init()
190 i915_vma_unpin(so->vma); in intel_renderstate_init()
195 err = i915_gem_ww_ctx_backoff(&so->ww); in intel_renderstate_init()
199 i915_gem_ww_ctx_fini(&so->ww); in intel_renderstate_init()
203 so->vma = NULL; in intel_renderstate_init()
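
The intel_renderstate_init() matches trace the i915 ww-mutex transaction pattern: fetch the engine's rodata, bind a GGTT VMA for it, then pin the context, lock and pin the object under the ww context, retrying via i915_gem_ww_ctx_backoff() on -EDEADLK. A condensed sketch assembled from those lines; the backing-object allocation (i915_gem_object_create_internal()) and the exact error-label layout are assumptions:

    int intel_renderstate_init(struct intel_renderstate *so,
                               struct intel_context *ce)
    {
        struct intel_engine_cs *engine = ce->engine;
        struct drm_i915_gem_object *obj = NULL;
        int err;

        memset(so, 0, sizeof(*so));

        so->rodata = render_state_get_rodata(engine);
        if (so->rodata) {
            if (so->rodata->batch_items * 4 > PAGE_SIZE)
                return -EINVAL;

            /* Assumed allocator for the single-page golden-state batch. */
            obj = i915_gem_object_create_internal(engine->i915, PAGE_SIZE);
            if (IS_ERR(obj))
                return PTR_ERR(obj);

            so->vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
            if (IS_ERR(so->vma)) {
                err = PTR_ERR(so->vma);
                goto err_obj;
            }
        }

        i915_gem_ww_ctx_init(&so->ww, true);
    retry:
        err = intel_context_pin_ww(ce, &so->ww);
        if (err)
            goto err_fini;

        /* Engines without a rodata table only need the context pinned. */
        if (!err && !so->rodata)
            return 0;

        err = i915_gem_object_lock(so->vma->obj, &so->ww);
        if (err)
            goto err_context;

        err = i915_vma_pin_ww(so->vma, &so->ww, 0, 0, PIN_GLOBAL | PIN_HIGH);
        if (err)
            goto err_context;

        err = render_state_setup(so, engine->i915);
        if (err)
            goto err_unpin;

        return 0;

    err_unpin:
        i915_vma_unpin(so->vma);
    err_context:
        intel_context_unpin(ce);
    err_fini:
        if (err == -EDEADLK) {
            /* ww contention: back off and replay the whole transaction. */
            err = i915_gem_ww_ctx_backoff(&so->ww);
            if (!err)
                goto retry;
        }
        i915_gem_ww_ctx_fini(&so->ww);
    err_obj:
        if (obj)
            i915_gem_object_put(obj);
        so->vma = NULL;
        return err;
    }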
207 int intel_renderstate_emit(struct intel_renderstate *so, in intel_renderstate_emit() argument
213 if (!so->vma) in intel_renderstate_emit()
216 err = i915_request_await_object(rq, so->vma->obj, false); in intel_renderstate_emit()
218 err = i915_vma_move_to_active(so->vma, rq, 0); in intel_renderstate_emit()
223 so->batch_offset, so->batch_size, in intel_renderstate_emit()
228 if (so->aux_size > 8) { in intel_renderstate_emit()
230 so->aux_offset, so->aux_size, in intel_renderstate_emit()
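
For emission, the matches show the request bookkeeping (await the object, mark the VMA active) followed by two uses of the recorded offset/size pairs, the second only when the auxiliary commands amount to more than a bare batch-buffer end. A sketch assuming the consumer is the engine's emit_bb_start() vfunc and that the dispatch flags shown are correct (neither appears in the matches):

    int intel_renderstate_emit(struct intel_renderstate *so,
                               struct i915_request *rq)
    {
        struct intel_engine_cs *engine = rq->engine;
        int err;

        if (!so->vma)
            return 0; /* nothing to emit for this engine */

        err = i915_request_await_object(rq, so->vma->obj, false);
        if (err == 0)
            err = i915_vma_move_to_active(so->vma, rq, 0);
        if (err)
            return err;

        /* Assumed consumer of batch_offset/batch_size. */
        err = engine->emit_bb_start(rq,
                                    so->batch_offset, so->batch_size,
                                    I915_DISPATCH_SECURE);
        if (err)
            return err;

        if (so->aux_size > 8) {
            err = engine->emit_bb_start(rq,
                                        so->aux_offset, so->aux_size,
                                        0 /* dispatch flags assumed */);
            if (err)
                return err;
        }

        return 0;
    }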
239 void intel_renderstate_fini(struct intel_renderstate *so, in intel_renderstate_fini() argument
242 if (so->vma) { in intel_renderstate_fini()
243 i915_vma_unpin(so->vma); in intel_renderstate_fini()
244 i915_vma_close(so->vma); in intel_renderstate_fini()
248 i915_gem_ww_ctx_fini(&so->ww); in intel_renderstate_fini()
250 if (so->vma) in intel_renderstate_fini()
251 i915_gem_object_put(so->vma->obj); in intel_renderstate_fini()
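
Taken together, the three entry points form a small lifecycle around a context: init builds and pins the golden state, emit attaches it to a request, and fini unwinds the pin, the ww transaction and the object reference. A hypothetical caller, assuming i915_request_create()/i915_request_add() for the request plumbing and with error handling abbreviated:

    static int record_render_state(struct intel_context *ce)
    {
        struct intel_renderstate so;
        struct i915_request *rq;
        int err;

        err = intel_renderstate_init(&so, ce);   /* pin ce + golden state */
        if (err)
            return err;

        rq = i915_request_create(ce);
        if (IS_ERR(rq)) {
            err = PTR_ERR(rq);
            goto out_fini;
        }

        err = intel_renderstate_emit(&so, rq);   /* queue the batches */

        i915_request_add(rq);
    out_fini:
        intel_renderstate_fini(&so, ce);         /* unpin, close, drop object */
        return err;
    }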