Lines matching refs:tl — drivers/gpu/drm/i915/gt/intel_timeline.c

324 void __intel_timeline_pin(struct intel_timeline *tl)  in __intel_timeline_pin()  argument
326 GEM_BUG_ON(!atomic_read(&tl->pin_count)); in __intel_timeline_pin()
327 atomic_inc(&tl->pin_count); in __intel_timeline_pin()
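
__intel_timeline_pin() is the unlocked, nested variant: the GEM_BUG_ON insists the caller already holds a pin, so only the refcount is bumped and no binding work is done. A hypothetical pairing (a sketch, not a real call site):

        err = intel_timeline_pin(tl, ww);       /* first pin: may bind the HWSP */
        if (err)
                return err;

        __intel_timeline_pin(tl);               /* nested pin: refcount bump only */
        /* ... */
        intel_timeline_unpin(tl);               /* drops the nested pin */
        intel_timeline_unpin(tl);               /* final unpin: releases the HWSP */
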
330 int intel_timeline_pin(struct intel_timeline *tl, struct i915_gem_ww_ctx *ww) in intel_timeline_pin() argument
334 if (atomic_add_unless(&tl->pin_count, 1, 0)) in intel_timeline_pin()
337 err = i915_ggtt_pin(tl->hwsp_ggtt, ww, 0, PIN_HIGH); in intel_timeline_pin()
341 tl->hwsp_offset = in intel_timeline_pin()
342 i915_ggtt_offset(tl->hwsp_ggtt) + in intel_timeline_pin()
343 offset_in_page(tl->hwsp_offset); in intel_timeline_pin()
344 GT_TRACE(tl->gt, "timeline:%llx using HWSP offset:%x\n", in intel_timeline_pin()
345 tl->fence_context, tl->hwsp_offset); in intel_timeline_pin()
347 cacheline_acquire(tl->hwsp_cacheline, tl->hwsp_offset); in intel_timeline_pin()
348 if (atomic_fetch_inc(&tl->pin_count)) { in intel_timeline_pin()
349 cacheline_release(tl->hwsp_cacheline); in intel_timeline_pin()
350 __i915_vma_unpin(tl->hwsp_ggtt); in intel_timeline_pin()
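
intel_timeline_pin() shows a common i915 idiom: atomic_add_unless(&pin_count, 1, 0) takes a reference only if the timeline is already pinned; otherwise the caller performs the expensive GGTT bind and then uses atomic_fetch_inc() to detect whether a concurrent pinner won the race, undoing its own acquire and bind if so. A hypothetical reduction of that pattern (obj, expensive_bind and expensive_unbind are stand-ins, not i915 API):

        struct obj {
                atomic_t pin_count;
                /* ... backing state ... */
        };

        static int obj_pin(struct obj *o)
        {
                int err;

                /* Fast path: take a reference only if already pinned. */
                if (atomic_add_unless(&o->pin_count, 1, 0))
                        return 0;

                err = expensive_bind(o);        /* i915_ggtt_pin() above */
                if (err)
                        return err;

                /* fetch_inc returns the old count: non-zero means we raced. */
                if (atomic_fetch_inc(&o->pin_count))
                        expensive_unbind(o);    /* undo our duplicate bind */

                return 0;
        }
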
356 void intel_timeline_reset_seqno(const struct intel_timeline *tl) in intel_timeline_reset_seqno() argument
359 GEM_BUG_ON(!atomic_read(&tl->pin_count)); in intel_timeline_reset_seqno()
360 WRITE_ONCE(*(u32 *)tl->hwsp_seqno, tl->seqno); in intel_timeline_reset_seqno()
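
intel_timeline_reset_seqno() copies the software seqno back into the hardware status page; the (u32 *) cast strips the const qualifier from tl->hwsp_seqno, which everyone else treats as read-only, and the pin_count assertion guarantees the HWSP is actually mapped. A hypothetical reader this pairs with (hwsp_seqno is declared const u32 * in struct intel_timeline):

        static inline u32 timeline_hwsp_seqno(const struct intel_timeline *tl)
        {
                return READ_ONCE(*tl->hwsp_seqno);      /* last seqno the GPU wrote */
        }
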
363 void intel_timeline_enter(struct intel_timeline *tl) in intel_timeline_enter() argument
365 struct intel_gt_timelines *timelines = &tl->gt->timelines; in intel_timeline_enter()
385 lockdep_assert_held(&tl->mutex); in intel_timeline_enter()
387 if (atomic_add_unless(&tl->active_count, 1, 0)) in intel_timeline_enter()
391 if (!atomic_fetch_inc(&tl->active_count)) { in intel_timeline_enter()
398 intel_timeline_reset_seqno(tl); in intel_timeline_enter()
399 list_add_tail(&tl->link, &timelines->active_list); in intel_timeline_enter()
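
intel_timeline_enter() counts in-flight activity; note that the spin_lock/spin_unlock on timelines->lock bracketing lines 391-399 is hidden here because those lines do not mention tl. On first activation the HWSP seqno is rewritten, since the status page contents are volatile and may have been lost while idle (e.g. across suspend), before the timeline joins the GT's active_list. A hypothetical bracketed use:

        mutex_lock(&tl->mutex);
        intel_timeline_enter(tl);       /* first enter joins gt->timelines.active_list */
        /* ... build and submit requests on this timeline ... */
        intel_timeline_exit(tl);        /* last exit leaves the active list */
        mutex_unlock(&tl->mutex);
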
404 void intel_timeline_exit(struct intel_timeline *tl) in intel_timeline_exit() argument
406 struct intel_gt_timelines *timelines = &tl->gt->timelines; in intel_timeline_exit()
409 lockdep_assert_held(&tl->mutex); in intel_timeline_exit()
411 GEM_BUG_ON(!atomic_read(&tl->active_count)); in intel_timeline_exit()
412 if (atomic_add_unless(&tl->active_count, -1, 1)) in intel_timeline_exit()
416 if (atomic_dec_and_test(&tl->active_count)) in intel_timeline_exit()
417 list_del(&tl->link); in intel_timeline_exit()
425 i915_syncmap_free(&tl->sync); in intel_timeline_exit()
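
intel_timeline_exit() mirrors the enter path: atomic_add_unless(&active_count, -1, 1) drops a reference unless it is the last one, and only the final exit takes the (filtered-out) timelines->lock, unlinks the timeline, and frees the syncmap cache of last-seen seqnos, which is cheaply rebuilt on demand. A hypothetical reduction of the last-exit pattern (obj and its fields are stand-ins):

        struct obj {
                atomic_t active_count;
                struct list_head link;
        };

        static void obj_exit(struct obj *o, spinlock_t *lock)
        {
                /* Fast path: drop a reference unless it is the final one. */
                if (atomic_add_unless(&o->active_count, -1, 1))
                        return;

                spin_lock(lock);
                if (atomic_dec_and_test(&o->active_count))
                        list_del(&o->link);     /* leave the active list */
                spin_unlock(lock);
        }
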
428 static u32 timeline_advance(struct intel_timeline *tl) in timeline_advance() argument
430 GEM_BUG_ON(!atomic_read(&tl->pin_count)); in timeline_advance()
431 GEM_BUG_ON(tl->seqno & tl->has_initial_breadcrumb); in timeline_advance()
433 return tl->seqno += 1 + tl->has_initial_breadcrumb; in timeline_advance()
436 static void timeline_rollback(struct intel_timeline *tl) in timeline_rollback() argument
438 tl->seqno -= 1 + tl->has_initial_breadcrumb; in timeline_rollback()
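
timeline_advance() and timeline_rollback() are exact inverses: each request consumes one seqno, plus one more when the timeline emits an initial breadcrumb, and the GEM_BUG_ON on line 431 enforces that the seqno parity is preserved. A worked example, assuming tl->has_initial_breadcrumb == 1:

        tl->seqno = 10;                 /* even, as the assert requires */
        seqno = timeline_advance(tl);   /* tl->seqno becomes 12, returns 12 */
        /* the skipped odd value 11 is left for the initial breadcrumb */
        timeline_rollback(tl);          /* on failure: tl->seqno back to 10 */
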
442 __intel_timeline_get_seqno(struct intel_timeline *tl, in __intel_timeline_get_seqno() argument
452 might_lock(&tl->gt->ggtt->vm.mutex); in __intel_timeline_get_seqno()
453 GT_TRACE(tl->gt, "timeline:%llx wrapped\n", tl->fence_context); in __intel_timeline_get_seqno()
474 vma = hwsp_alloc(tl, &cacheline); in __intel_timeline_get_seqno()
499 err = i915_active_ref(&tl->hwsp_cacheline->active, in __intel_timeline_get_seqno()
500 tl->fence_context, in __intel_timeline_get_seqno()
505 cacheline_release(tl->hwsp_cacheline); /* ownership now xfered to rq */ in __intel_timeline_get_seqno()
506 cacheline_free(tl->hwsp_cacheline); in __intel_timeline_get_seqno()
508 i915_vma_unpin(tl->hwsp_ggtt); /* binding kept alive by old cacheline */ in __intel_timeline_get_seqno()
509 i915_vma_put(tl->hwsp_ggtt); in __intel_timeline_get_seqno()
511 tl->hwsp_ggtt = i915_vma_get(vma); in __intel_timeline_get_seqno()
514 tl->hwsp_offset = cacheline * CACHELINE_BYTES; in __intel_timeline_get_seqno()
515 tl->hwsp_seqno = in __intel_timeline_get_seqno()
516 memset(vaddr + tl->hwsp_offset, 0, CACHELINE_BYTES); in __intel_timeline_get_seqno()
518 tl->hwsp_offset += i915_ggtt_offset(vma); in __intel_timeline_get_seqno()
519 GT_TRACE(tl->gt, "timeline:%llx using HWSP offset:%x\n", in __intel_timeline_get_seqno()
520 tl->fence_context, tl->hwsp_offset); in __intel_timeline_get_seqno()
522 cacheline_acquire(cl, tl->hwsp_offset); in __intel_timeline_get_seqno()
523 tl->hwsp_cacheline = cl; in __intel_timeline_get_seqno()
525 *seqno = timeline_advance(tl); in __intel_timeline_get_seqno()
526 GEM_BUG_ON(i915_seqno_passed(*tl->hwsp_seqno, *seqno)); in __intel_timeline_get_seqno()
534 timeline_rollback(tl); in __intel_timeline_get_seqno()
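
Only the tl-referencing lines of the wrap path survive the filter: the allocation checks, the kmap of the new page, and most of the error unwinding are hidden. The heart of it is replace-and-retire: the new HWSP storage is published immediately, while the old cacheline is kept alive through i915_active until the request that last wrote it retires; on any failure, timeline_rollback() gives back the optimistically advanced seqno. A hypothetical reduction (old, new, tl->storage and put_buf() are stand-ins; i915_active_ref() is the real call from line 499):

        /* Tie the old buffer's release to this request's retirement. */
        err = i915_active_ref(&old->active, tl->fence_context, &rq->fence);
        if (err) {
                timeline_rollback(tl);  /* give back the advanced seqno */
                return err;
        }

        tl->storage = new;      /* readers now observe the fresh backing page */
        put_buf(old);           /* final reference drops when rq retires */
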
538 int intel_timeline_get_seqno(struct intel_timeline *tl, in intel_timeline_get_seqno() argument
542 *seqno = timeline_advance(tl); in intel_timeline_get_seqno()
545 if (unlikely(!*seqno && tl->hwsp_cacheline)) in intel_timeline_get_seqno()
546 return __intel_timeline_get_seqno(tl, rq, seqno); in intel_timeline_get_seqno()
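
intel_timeline_get_seqno() is the fast path: advance, and only when the 32-bit seqno wraps to zero on a timeline that owns its HWSP cacheline does it fall back to the swap above. A hypothetical call site when emitting a request:

        err = intel_timeline_get_seqno(tl, rq, &seqno);
        if (err)                        /* wrap path failed; seqno already rolled back */
                return err;
        rq->fence.seqno = seqno;        /* on a wrap, the slow path re-advanced past 0 */
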
592 void intel_timeline_unpin(struct intel_timeline *tl) in intel_timeline_unpin() argument
594 GEM_BUG_ON(!atomic_read(&tl->pin_count)); in intel_timeline_unpin()
595 if (!atomic_dec_and_test(&tl->pin_count)) in intel_timeline_unpin()
598 cacheline_release(tl->hwsp_cacheline); in intel_timeline_unpin()
600 __i915_vma_unpin(tl->hwsp_ggtt); in intel_timeline_unpin()
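
intel_timeline_unpin() is the inverse of intel_timeline_pin(): only the final unpin releases the cacheline reference and the GGTT binding. Putting the pieces of this listing together, a hypothetical end-to-end lifetime with error handling elided (the real call sites are spread across the request and engine code):

        err = intel_timeline_pin(tl, ww);       /* map the HWSP into the GGTT */
        if (err)
                return err;

        mutex_lock(&tl->mutex);
        intel_timeline_enter(tl);               /* join the GT's active list */
        err = intel_timeline_get_seqno(tl, rq, &seqno); /* allocate a breadcrumb */
        intel_timeline_exit(tl);
        mutex_unlock(&tl->mutex);

        intel_timeline_unpin(tl);               /* last pin: cacheline + vma released */
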