Lines Matching refs:exec_flags

289 #define exec_entry(EB, VMA) (&(EB)->exec[(VMA)->exec_flags - (EB)->flags])
396 unsigned int exec_flags = *vma->exec_flags; in eb_pin_vma() local
405 if (unlikely(exec_flags & EXEC_OBJECT_NEEDS_GTT)) in eb_pin_vma()
411 if (unlikely(exec_flags & EXEC_OBJECT_NEEDS_FENCE)) { in eb_pin_vma()
418 exec_flags |= __EXEC_OBJECT_HAS_FENCE; in eb_pin_vma()
421 *vma->exec_flags = exec_flags | __EXEC_OBJECT_HAS_PIN; in eb_pin_vma()
422 return !eb_vma_misplaced(entry, vma, exec_flags); in eb_pin_vma()
472 if (unlikely(vma->exec_flags)) { in eb_validate_vma()
534 vma->exec_flags = &eb->flags[i]; in eb_add_vma()
562 eb_unreserve_vma(vma, vma->exec_flags); in eb_add_vma()
568 vma->exec_flags = NULL; in eb_add_vma()
594 unsigned int exec_flags = *vma->exec_flags; in eb_reserve_vma() local
599 if (exec_flags & EXEC_OBJECT_NEEDS_GTT) in eb_reserve_vma()
606 if (!(exec_flags & EXEC_OBJECT_SUPPORTS_48B_ADDRESS)) in eb_reserve_vma()
609 if (exec_flags & __EXEC_OBJECT_NEEDS_MAP) in eb_reserve_vma()
612 if (exec_flags & EXEC_OBJECT_PINNED) { in eb_reserve_vma()
615 } else if (exec_flags & __EXEC_OBJECT_NEEDS_BIAS) { in eb_reserve_vma()
630 if (unlikely(exec_flags & EXEC_OBJECT_NEEDS_FENCE)) { in eb_reserve_vma()
638 exec_flags |= __EXEC_OBJECT_HAS_FENCE; in eb_reserve_vma()
641 *vma->exec_flags = exec_flags | __EXEC_OBJECT_HAS_PIN; in eb_reserve_vma()
642 GEM_BUG_ON(eb_vma_misplaced(entry, vma, exec_flags)); in eb_reserve_vma()
810 GEM_BUG_ON(vma->exec_flags != &eb->flags[i]); in eb_lookup_vmas()
857 GEM_BUG_ON(vma->exec_flags != &eb->flags[i]); in eb_release_vmas()
858 vma->exec_flags = NULL; in eb_release_vmas()
1355 *target->exec_flags |= EXEC_OBJECT_WRITE; in eb_relocate_entry()
1407 *vma->exec_flags &= ~EXEC_OBJECT_ASYNC; in eb_relocate_entry()
1834 vma->exec_flags = NULL; in eb_move_to_gpu()
1928 vma->exec_flags = &eb->flags[eb->buffer_count]; in eb_parse()
2288 if (unlikely(*eb.batch->exec_flags & EXEC_OBJECT_WRITE)) { in i915_gem_do_execbuffer()
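Read together, these matches trace the lifecycle of the flags word: eb_add_vma() (line 534) points vma->exec_flags at the VMA's slot in the eb->flags[] array, the pin/reserve/relocate paths read and rewrite the flags through that pointer (lines 396-422, 594-642, 1355, 1407), and eb_release_vmas()/eb_move_to_gpu() sever the back-pointer by resetting it to NULL (lines 858, 1834). Because flags[] is indexed in parallel with the exec[] entries, the exec_entry() macro at line 289 can recover a VMA's entry with plain pointer arithmetic. The standalone C sketch below only illustrates that pattern; the struct layouts, flag values and helper names are simplified stand-ins, not the driver's actual definitions.

/*
 * Illustrative sketch of the "pointer into a parallel flags array"
 * pattern shown by the matches above.  Not the i915 code itself.
 */
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

#define EXEC_OBJECT_WRITE       (1u << 2)    /* stand-in flag values */
#define __EXEC_OBJECT_HAS_PIN   (1u << 31)

struct exec_object {
	unsigned long handle;
};

struct vma {
	unsigned int *exec_flags;   /* points into eb.flags[], or NULL */
};

struct eb {
	struct exec_object *exec;   /* execbuffer entries               */
	unsigned int *flags;        /* per-entry flags, same indexing   */
	unsigned int buffer_count;
};

/* Same pointer-difference trick as the exec_entry() macro at line 289. */
#define exec_entry(EB, VMA) (&(EB)->exec[(VMA)->exec_flags - (EB)->flags])

/* Mirrors eb_add_vma(): bind the vma to slot i of the flags array. */
static void add_vma(struct eb *eb, unsigned int i, struct vma *vma)
{
	vma->exec_flags = &eb->flags[i];
}

/* Mirrors eb_release_vmas(): sanity-check and sever the back-pointer. */
static void release_vma(struct eb *eb, unsigned int i, struct vma *vma)
{
	assert(vma->exec_flags == &eb->flags[i]);
	vma->exec_flags = NULL;
}

int main(void)
{
	struct exec_object exec[2] = { { .handle = 1 }, { .handle = 2 } };
	unsigned int flags[2] = { 0, 0 };
	struct eb eb = { .exec = exec, .flags = flags, .buffer_count = 2 };
	struct vma vmas[2] = { { NULL }, { NULL } };

	for (unsigned int i = 0; i < eb.buffer_count; i++)
		add_vma(&eb, i, &vmas[i]);

	/* Update flags through the back-pointer, as eb_relocate_entry() does. */
	*vmas[1].exec_flags |= EXEC_OBJECT_WRITE | __EXEC_OBJECT_HAS_PIN;

	/* Recover the exec entry from the vma alone via pointer arithmetic. */
	printf("vma[1] -> handle %lu, flags 0x%x\n",
	       exec_entry(&eb, &vmas[1])->handle, *vmas[1].exec_flags);

	for (unsigned int i = 0; i < eb.buffer_count; i++)
		release_vma(&eb, i, &vmas[i]);
	return 0;
}

Keeping the per-object flags in a dense array rather than inside each vma presumably keeps the reservation loops cache-friendly, while the back-pointer spares the vma from storing its entry index explicitly.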