
Searched refs:batch (Results 1 – 25 of 153) sorted by relevance


/Linux-v5.10/mm/
mmu_gather.c
18 struct mmu_gather_batch *batch; in tlb_next_batch() local
20 batch = tlb->active; in tlb_next_batch()
21 if (batch->next) { in tlb_next_batch()
22 tlb->active = batch->next; in tlb_next_batch()
29 batch = (void *)__get_free_pages(GFP_NOWAIT | __GFP_NOWARN, 0); in tlb_next_batch()
30 if (!batch) in tlb_next_batch()
34 batch->next = NULL; in tlb_next_batch()
35 batch->nr = 0; in tlb_next_batch()
36 batch->max = MAX_GATHER_BATCH; in tlb_next_batch()
38 tlb->active->next = batch; in tlb_next_batch()
[all …]
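
The truncated mmu_gather.c hit shows the kernel's chained-batch idiom: reuse a spare batch if one is already linked after the active one, otherwise allocate a fresh batch with __get_free_pages() and splice it in. A minimal user-space sketch of the same flow (the struct layout, capacity and calloc() allocation are illustrative stand-ins, not the kernel's):

#include <stdbool.h>
#include <stdlib.h>

#define MAX_GATHER_BATCH 64          /* illustrative capacity, not the kernel's value */

struct gather_batch {
    struct gather_batch *next;       /* singly linked chain of batches */
    unsigned int nr;                 /* entries currently stored */
    unsigned int max;                /* capacity of this batch */
    void *pages[MAX_GATHER_BATCH];
};

struct gather {
    struct gather_batch *active;     /* batch currently being filled */
};

/* Mirror of the tlb_next_batch() flow above: reuse a spare batch if one
 * is already chained, otherwise allocate and link a new one. */
static bool next_batch(struct gather *g)
{
    struct gather_batch *batch = g->active;

    if (batch->next) {               /* a spare batch is already linked */
        g->active = batch->next;
        return true;
    }

    batch = calloc(1, sizeof(*batch));  /* kernel: __get_free_pages(GFP_NOWAIT, 0) */
    if (!batch)
        return false;                /* caller falls back to flushing early */

    batch->next = NULL;
    batch->nr = 0;
    batch->max = MAX_GATHER_BATCH;

    g->active->next = batch;         /* link and switch to the new batch */
    g->active = batch;
    return true;
}

int main(void)
{
    struct gather_batch first = { .max = MAX_GATHER_BATCH };
    struct gather g = { .active = &first };

    return next_batch(&g) ? 0 : 1;   /* allocates and links a second batch */
}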
/Linux-v5.10/arch/powerpc/mm/book3s64/
hash_tlb.c
44 struct ppc64_tlb_batch *batch = &get_cpu_var(ppc64_tlb_batch); in hpte_need_flush() local
51 i = batch->index; in hpte_need_flush()
103 if (!batch->active) { in hpte_need_flush()
119 if (i != 0 && (mm != batch->mm || batch->psize != psize || in hpte_need_flush()
120 batch->ssize != ssize)) { in hpte_need_flush()
121 __flush_tlb_pending(batch); in hpte_need_flush()
125 batch->mm = mm; in hpte_need_flush()
126 batch->psize = psize; in hpte_need_flush()
127 batch->ssize = ssize; in hpte_need_flush()
129 batch->pte[i] = rpte; in hpte_need_flush()
[all …]
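
hpte_need_flush() only appends to the pending batch while the new entry shares the batch's mm, page size (psize) and segment size (ssize); a mismatch drains the batch first via __flush_tlb_pending(). A compilable sketch of that flush-on-mismatch idiom, with a single int key standing in for the (mm, psize, ssize) triple:

#include <stdio.h>

#define BATCH_MAX 16

struct flush_batch {
    int key;                 /* stands in for (mm, psize, ssize) */
    unsigned int index;      /* pending entries */
    unsigned long pte[BATCH_MAX];
};

static void flush_pending(struct flush_batch *b)
{
    printf("flushing %u entries for key %d\n", b->index, b->key);
    b->index = 0;
}

/* Mirror of the mismatch test in hpte_need_flush(): a new entry whose
 * attributes differ from what is already batched forces a flush first. */
static void batch_add(struct flush_batch *b, int key, unsigned long pte)
{
    if (b->index != 0 && b->key != key)
        flush_pending(b);

    b->key = key;
    b->pte[b->index++] = pte;

    if (b->index == BATCH_MAX)   /* a full batch is flushed immediately */
        flush_pending(b);
}

int main(void)
{
    struct flush_batch b = {0};

    batch_add(&b, 1, 0x1000);
    batch_add(&b, 1, 0x2000);
    batch_add(&b, 2, 0x3000);    /* key change: the first two flush here */
    flush_pending(&b);           /* drain the remainder */
    return 0;
}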
/Linux-v5.10/drivers/gpu/drm/i915/selftests/
igt_spinner.c
47 spin->batch = vaddr; in igt_spinner_init()
97 u32 *batch; in igt_spinner_create_request() local
135 batch = spin->batch; in igt_spinner_create_request()
138 *batch++ = MI_STORE_DWORD_IMM_GEN4; in igt_spinner_create_request()
139 *batch++ = lower_32_bits(hws_address(hws, rq)); in igt_spinner_create_request()
140 *batch++ = upper_32_bits(hws_address(hws, rq)); in igt_spinner_create_request()
142 *batch++ = MI_STORE_DWORD_IMM_GEN4; in igt_spinner_create_request()
143 *batch++ = 0; in igt_spinner_create_request()
144 *batch++ = hws_address(hws, rq); in igt_spinner_create_request()
146 *batch++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT; in igt_spinner_create_request()
[all …]
i915_request.c
661 struct i915_vma *batch) in empty_request() argument
671 batch->node.start, in empty_request()
672 batch->node.size, in empty_request()
688 struct i915_vma *batch; in live_empty_request() local
697 batch = empty_batch(i915); in live_empty_request()
698 if (IS_ERR(batch)) in live_empty_request()
699 return PTR_ERR(batch); in live_empty_request()
714 request = empty_request(engine, batch); in live_empty_request()
727 request = empty_request(engine, batch); in live_empty_request()
757 i915_vma_unpin(batch); in live_empty_request()
[all …]
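
Both i915 hits above lean on the kernel's error-pointer idiom: empty_batch() returns either a valid pointer or an errno encoded into the pointer value, and callers test it with IS_ERR()/PTR_ERR() instead of a separate status argument. A self-contained user-space rendering of that encoding (make_batch() is a hypothetical stand-in for empty_batch()):

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

/* The kernel reserves the top 4095 values of the address space for
 * error codes, so any pointer >= (unsigned long)-4095 is an error. */
static inline void *ERR_PTR(long error) { return (void *)error; }
static inline long PTR_ERR(const void *ptr) { return (long)ptr; }
static inline int IS_ERR(const void *ptr)
{
    return (unsigned long)ptr >= (unsigned long)-4095;
}

/* Stand-in for empty_batch(): return a buffer or an encoded errno. */
static void *make_batch(size_t size)
{
    void *buf = malloc(size);
    return buf ? buf : ERR_PTR(-ENOMEM);
}

int main(void)
{
    void *batch = make_batch(4096);

    if (IS_ERR(batch)) {             /* same shape as the i915 callers */
        fprintf(stderr, "err=%ld\n", PTR_ERR(batch));
        return 1;
    }
    free(batch);
    return 0;
}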
/Linux-v5.10/drivers/gpu/drm/i915/gem/
i915_gem_object_blt.c
23 struct i915_vma *batch; in intel_emit_vma_fill_blt() local
47 batch = i915_vma_instance(pool->obj, ce->vm, NULL); in intel_emit_vma_fill_blt()
48 if (IS_ERR(batch)) { in intel_emit_vma_fill_blt()
49 err = PTR_ERR(batch); in intel_emit_vma_fill_blt()
53 err = i915_vma_pin_ww(batch, ww, 0, 0, PIN_USER); in intel_emit_vma_fill_blt()
102 batch->private = pool; in intel_emit_vma_fill_blt()
103 return batch; in intel_emit_vma_fill_blt()
106 i915_vma_unpin(batch); in intel_emit_vma_fill_blt()
151 struct i915_vma *batch; in i915_gem_object_fill_blt() local
174 batch = intel_emit_vma_fill_blt(ce, vma, &ww, value); in i915_gem_object_fill_blt()
[all …]
i915_gem_execbuffer.c
249 struct eb_vma *batch; /** identity of the batch obj/vma */ member
569 eb->batch = ev; in eb_add_vma()
835 unsigned int batch = eb_batch_index(eb); in eb_lookup_vmas() local
856 eb_add_vma(eb, i, batch, vma); in eb_lookup_vmas()
859 if (unlikely(eb->batch->flags & EXEC_OBJECT_WRITE)) { in eb_lookup_vmas()
867 eb->batch->vma->size)) { in eb_lookup_vmas()
873 eb->batch_len = eb->batch->vma->size - eb->batch_start_offset; in eb_lookup_vmas()
1044 struct drm_i915_gem_object *obj = cache->rq->batch->obj; in reloc_gpu_flush()
1275 struct i915_vma *batch; in __reloc_gpu_alloc() local
1299 batch = i915_vma_instance(pool->obj, vma->vm, NULL); in __reloc_gpu_alloc()
[all …]
i915_gem_client_blt.c
163 struct i915_vma *batch; in clear_pages_worker() local
184 batch = intel_emit_vma_fill_blt(w->ce, vma, &ww, w->value); in clear_pages_worker()
185 if (IS_ERR(batch)) { in clear_pages_worker()
186 err = PTR_ERR(batch); in clear_pages_worker()
201 err = intel_emit_vma_mark_active(batch, rq); in clear_pages_worker()
221 batch->node.start, batch->node.size, in clear_pages_worker()
231 intel_emit_vma_release(w->ce, batch); in clear_pages_worker()
/Linux-v5.10/drivers/gpu/drm/i915/gt/
gen7_renderclear.c
214 gen7_emit_state_base_address(struct batch_chunk *batch, in gen7_emit_state_base_address() argument
217 u32 *cs = batch_alloc_items(batch, 0, 12); in gen7_emit_state_base_address()
221 *cs++ = batch_addr(batch) | BASE_ADDRESS_MODIFY; in gen7_emit_state_base_address()
223 *cs++ = batch_addr(batch) | surface_state_base | BASE_ADDRESS_MODIFY; in gen7_emit_state_base_address()
225 *cs++ = batch_addr(batch) | BASE_ADDRESS_MODIFY; in gen7_emit_state_base_address()
227 *cs++ = batch_addr(batch) | BASE_ADDRESS_MODIFY; in gen7_emit_state_base_address()
229 *cs++ = batch_addr(batch) | BASE_ADDRESS_MODIFY; in gen7_emit_state_base_address()
238 batch_advance(batch, cs); in gen7_emit_state_base_address()
242 gen7_emit_vfe_state(struct batch_chunk *batch, in gen7_emit_vfe_state() argument
249 u32 *cs = batch_alloc_items(batch, 32, 8); in gen7_emit_vfe_state()
[all …]
selftest_hangcheck.c
53 u32 *batch; member
96 h->batch = vaddr; in hang_init()
143 u32 *batch; in hang_create_request() local
163 h->batch = vaddr; in hang_create_request()
201 batch = h->batch; in hang_create_request()
203 *batch++ = MI_STORE_DWORD_IMM_GEN4; in hang_create_request()
204 *batch++ = lower_32_bits(hws_address(hws, rq)); in hang_create_request()
205 *batch++ = upper_32_bits(hws_address(hws, rq)); in hang_create_request()
206 *batch++ = rq->fence.seqno; in hang_create_request()
207 *batch++ = MI_NOOP; in hang_create_request()
[all …]
intel_engine.h
226 static inline u32 *__gen8_emit_pipe_control(u32 *batch, u32 flags0, u32 flags1, u32 offset) in __gen8_emit_pipe_control() argument
228 memset(batch, 0, 6 * sizeof(u32)); in __gen8_emit_pipe_control()
230 batch[0] = GFX_OP_PIPE_CONTROL(6) | flags0; in __gen8_emit_pipe_control()
231 batch[1] = flags1; in __gen8_emit_pipe_control()
232 batch[2] = offset; in __gen8_emit_pipe_control()
234 return batch + 6; in __gen8_emit_pipe_control()
237 static inline u32 *gen8_emit_pipe_control(u32 *batch, u32 flags, u32 offset) in gen8_emit_pipe_control() argument
239 return __gen8_emit_pipe_control(batch, 0, flags, offset); in gen8_emit_pipe_control()
242 static inline u32 *gen12_emit_pipe_control(u32 *batch, u32 flags0, u32 flags1, u32 offset) in gen12_emit_pipe_control() argument
244 return __gen8_emit_pipe_control(batch, flags0, flags1, offset); in gen12_emit_pipe_control()
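
The intel_engine.h helpers appear in full above: every PIPE_CONTROL occupies six dwords, zeroed first, then header, flags and offset are filled in, and the emitter returns the advanced cursor so calls chain. A sketch of driving such an emitter into a plain buffer; the GFX_OP_PIPE_CONTROL value and the flag bits below are illustrative stand-ins, not guaranteed hardware encodings:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef uint32_t u32;

/* Stand-in opcode macro: the real GFX_OP_PIPE_CONTROL encodes an i915
 * hardware command; treat this value as purely illustrative. */
#define GFX_OP_PIPE_CONTROL(len) (0x7a000000u | ((len) - 2))

static inline u32 *gen8_emit_pipe_control(u32 *batch, u32 flags, u32 offset)
{
    memset(batch, 0, 6 * sizeof(u32));   /* all six dwords start zeroed */
    batch[0] = GFX_OP_PIPE_CONTROL(6);
    batch[1] = flags;
    batch[2] = offset;
    return batch + 6;                    /* advanced cursor, so calls chain */
}

int main(void)
{
    u32 buf[32];
    u32 *cs = buf;

    /* Two chained emissions, as a real batch builder would do. */
    cs = gen8_emit_pipe_control(cs, 0x1 /* illustrative flag */, 0x100);
    cs = gen8_emit_pipe_control(cs, 0x2 /* illustrative flag */, 0x200);

    printf("emitted %td dwords\n", cs - buf);
    return 0;
}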
selftest_workarounds.c
488 struct i915_vma *batch; in check_dirty_whitelist() local
496 batch = create_batch(ce->vm); in check_dirty_whitelist()
497 if (IS_ERR(batch)) { in check_dirty_whitelist()
498 err = PTR_ERR(batch); in check_dirty_whitelist()
530 cs = i915_gem_object_pin_map(batch->obj, I915_MAP_WC); in check_dirty_whitelist()
579 i915_gem_object_flush_map(batch->obj); in check_dirty_whitelist()
580 i915_gem_object_unpin_map(batch->obj); in check_dirty_whitelist()
595 i915_vma_lock(batch); in check_dirty_whitelist()
596 err = i915_request_await_object(rq, batch->obj, false); in check_dirty_whitelist()
598 err = i915_vma_move_to_active(batch, rq, 0); in check_dirty_whitelist()
[all …]
selftest_engine_cs.c
131 struct i915_vma *batch; in perf_mi_bb_start() local
137 batch = create_empty_batch(ce); in perf_mi_bb_start()
138 if (IS_ERR(batch)) { in perf_mi_bb_start()
139 err = PTR_ERR(batch); in perf_mi_bb_start()
144 err = i915_vma_sync(batch); in perf_mi_bb_start()
147 i915_vma_put(batch); in perf_mi_bb_start()
165 batch->node.start, 8, in perf_mi_bb_start()
186 i915_vma_put(batch); in perf_mi_bb_start()
/Linux-v5.10/arch/powerpc/include/asm/book3s/64/
tlbflush-hash.h
25 extern void __flush_tlb_pending(struct ppc64_tlb_batch *batch);
31 struct ppc64_tlb_batch *batch; in arch_enter_lazy_mmu_mode() local
35 batch = this_cpu_ptr(&ppc64_tlb_batch); in arch_enter_lazy_mmu_mode()
36 batch->active = 1; in arch_enter_lazy_mmu_mode()
41 struct ppc64_tlb_batch *batch; in arch_leave_lazy_mmu_mode() local
45 batch = this_cpu_ptr(&ppc64_tlb_batch); in arch_leave_lazy_mmu_mode()
47 if (batch->index) in arch_leave_lazy_mmu_mode()
48 __flush_tlb_pending(batch); in arch_leave_lazy_mmu_mode()
49 batch->active = 0; in arch_leave_lazy_mmu_mode()
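
The lazy-MMU hooks bracket a region in which flushes merely accumulate: arch_enter_lazy_mmu_mode() marks the per-CPU batch active, and arch_leave_lazy_mmu_mode() drains anything pending before clearing the flag. A single-threaded user-space analogue (a plain global stands in for the per-CPU variable):

#include <stdio.h>

struct tlb_batch {
    int active;              /* inside a lazy region? */
    unsigned int index;      /* pending entries */
};

static struct tlb_batch the_batch;   /* kernel uses a per-CPU variable */

static void flush_pending(struct tlb_batch *b)
{
    printf("flushed %u pending entries\n", b->index);
    b->index = 0;
}

static void enter_lazy_mode(void)
{
    the_batch.active = 1;    /* from here on, flushes are batched */
}

static void leave_lazy_mode(void)
{
    if (the_batch.index)     /* drain whatever accumulated */
        flush_pending(&the_batch);
    the_batch.active = 0;
}

int main(void)
{
    enter_lazy_mode();
    the_batch.index = 3;     /* pretend three flushes were deferred */
    leave_lazy_mode();
    return 0;
}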
/Linux-v5.10/drivers/gpu/drm/i915/gem/selftests/
igt_gem_utils.c
114 struct i915_vma *batch; in igt_gpu_fill_dw() local
121 batch = igt_emit_store_dw(vma, offset, count, val); in igt_gpu_fill_dw()
122 if (IS_ERR(batch)) in igt_gpu_fill_dw()
123 return PTR_ERR(batch); in igt_gpu_fill_dw()
131 i915_vma_lock(batch); in igt_gpu_fill_dw()
132 err = i915_request_await_object(rq, batch->obj, false); in igt_gpu_fill_dw()
134 err = i915_vma_move_to_active(batch, rq, 0); in igt_gpu_fill_dw()
135 i915_vma_unlock(batch); in igt_gpu_fill_dw()
152 batch->node.start, batch->node.size, in igt_gpu_fill_dw()
160 i915_vma_unpin_and_release(&batch, 0); in igt_gpu_fill_dw()
i915_gem_client_blt.c
146 struct i915_vma *batch; member
155 struct drm_i915_gem_object *batch) in prepare_blit() argument
157 const int gen = INTEL_GEN(to_i915(batch->base.dev)); in prepare_blit()
162 cs = i915_gem_object_pin_map(batch, I915_MAP_WC); in prepare_blit()
214 i915_gem_object_flush_map(batch); in prepare_blit()
215 i915_gem_object_unpin_map(batch); in prepare_blit()
228 i915_vma_put(t->batch); in tiled_blits_destroy_buffers()
267 t->batch = __create_vma(t, PAGE_SIZE, false); in tiled_blits_create_buffers()
268 if (IS_ERR(t->batch)) in tiled_blits_create_buffers()
269 return PTR_ERR(t->batch); in tiled_blits_create_buffers()
[all …]
/Linux-v5.10/drivers/xen/
gntdev.c
726 static int gntdev_get_page(struct gntdev_copy_batch *batch, void __user *virt, in gntdev_get_page() argument
734 ret = pin_user_pages_fast(addr, 1, batch->writeable ? FOLL_WRITE : 0, &page); in gntdev_get_page()
738 batch->pages[batch->nr_pages++] = page; in gntdev_get_page()
746 static void gntdev_put_pages(struct gntdev_copy_batch *batch) in gntdev_put_pages() argument
748 unpin_user_pages_dirty_lock(batch->pages, batch->nr_pages, batch->writeable); in gntdev_put_pages()
749 batch->nr_pages = 0; in gntdev_put_pages()
750 batch->writeable = false; in gntdev_put_pages()
753 static int gntdev_copy(struct gntdev_copy_batch *batch) in gntdev_copy() argument
757 gnttab_batch_copy(batch->ops, batch->nr_ops); in gntdev_copy()
758 gntdev_put_pages(batch); in gntdev_copy()
[all …]
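
gntdev pins user pages one at a time with pin_user_pages_fast() but releases the entire set in a single unpin_user_pages_dirty_lock() call once the grant copy completes. The acquire-individually/release-in-bulk shape, as a compilable analogue with malloc()/free() standing in for pin/unpin:

#include <stdio.h>
#include <stdlib.h>

#define COPY_BATCH_MAX 24

struct copy_batch {
    void *pages[COPY_BATCH_MAX];
    unsigned int nr_pages;
    int writeable;               /* pages must be marked dirty on release */
};

/* Analogue of gntdev_get_page(): acquire one resource into the batch. */
static int batch_get_page(struct copy_batch *b)
{
    void *page = malloc(4096);   /* kernel: pin_user_pages_fast() */
    if (!page)
        return -1;
    b->pages[b->nr_pages++] = page;
    return 0;
}

/* Analogue of gntdev_put_pages(): release everything in one sweep,
 * which is the whole point of batching the acquisitions. */
static void batch_put_pages(struct copy_batch *b)
{
    for (unsigned int i = 0; i < b->nr_pages; i++)
        free(b->pages[i]);       /* kernel: unpin_user_pages_dirty_lock() */
    b->nr_pages = 0;
    b->writeable = 0;
}

int main(void)
{
    struct copy_batch b = {0};

    for (int i = 0; i < 4; i++)
        if (batch_get_page(&b))
            break;
    printf("pinned %u pages\n", b.nr_pages);
    batch_put_pages(&b);
    return 0;
}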
/Linux-v5.10/drivers/gpu/drm/vmwgfx/
vmwgfx_mob.c
236 struct vmw_otable_batch *batch) in vmw_otable_batch_setup() argument
240 struct vmw_otable *otables = batch->otables; in vmw_otable_batch_setup()
245 for (i = 0; i < batch->num_otables; ++i) { in vmw_otable_batch_setup()
254 ret = vmw_bo_create_and_populate(dev_priv, bo_size, &batch->otable_bo); in vmw_otable_batch_setup()
259 for (i = 0; i < batch->num_otables; ++i) { in vmw_otable_batch_setup()
260 if (!batch->otables[i].enabled) in vmw_otable_batch_setup()
263 ret = vmw_setup_otable_base(dev_priv, i, batch->otable_bo, in vmw_otable_batch_setup()
274 for (i = 0; i < batch->num_otables; ++i) { in vmw_otable_batch_setup()
275 if (batch->otables[i].enabled) in vmw_otable_batch_setup()
277 &batch->otables[i]); in vmw_otable_batch_setup()
[all …]
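
vmw_otable_batch_setup() shows the classic partial-unwind idiom: walk the batch setting entries up, and on failure walk it again tearing down only the entries that were actually enabled. A compilable sketch (the structures and the simulated failure are illustrative):

#include <stdio.h>

struct otable { int enabled; };

static int setup_one(struct otable *t, unsigned int i)
{
    if (i == 2)                  /* simulate a failure part-way through */
        return -1;
    t->enabled = 1;
    return 0;
}

static void teardown_one(struct otable *t)
{
    t->enabled = 0;
}

static int batch_setup(struct otable *tables, unsigned int n)
{
    unsigned int i;

    for (i = 0; i < n; i++) {
        if (setup_one(&tables[i], i))
            goto out_unwind;
    }
    return 0;

out_unwind:
    /* Same shape as the vmwgfx error path: only enabled entries were
     * set up, so only those are torn down. */
    for (i = 0; i < n; i++) {
        if (tables[i].enabled)
            teardown_one(&tables[i]);
    }
    return -1;
}

int main(void)
{
    struct otable tables[4] = {0};

    printf("setup %s\n", batch_setup(tables, 4) ? "failed, unwound" : "ok");
    return 0;
}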
/Linux-v5.10/tools/virtio/
virtio_test.c
169 bool delayed, int batch, int reset_n, int bufs) in run_test() argument
177 const bool random_batch = batch == RANDOM_BATCH; in run_test()
192 batch = (random() % vq->vring.num) + 1; in run_test()
195 (started - completed) < batch) { in run_test()
346 long batch = 1, reset = 0; in main() local
375 batch = RANDOM_BATCH; in main()
377 batch = strtol(optarg, NULL, 10); in main()
378 assert(batch > 0); in main()
379 assert(batch < (long)INT_MAX + 1); in main()
400 run_test(&dev, &dev.vqs[0], delayed, batch, reset, 0x100000); in main()
/Linux-v5.10/tools/testing/selftests/bpf/map_tests/
htab_map_batch_ops.c
78 __u32 batch, count, total, total_success; in __test_map_lookup_and_delete_batch() local
115 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
125 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
133 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
159 total ? &batch : NULL, in __test_map_lookup_and_delete_batch()
160 &batch, keys + total, in __test_map_lookup_and_delete_batch()
222 total ? &batch : NULL, in __test_map_lookup_and_delete_batch()
223 &batch, keys + total, in __test_map_lookup_and_delete_batch()
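
The selftest drives libbpf's bpf_map_lookup_and_delete_batch(): in_batch is NULL on the first call, the out_batch cursor is fed back in on subsequent calls, and ENOENT signals that iteration is complete. A hedged usage sketch, assuming map_fd is an already-created BPF_MAP_TYPE_HASH fd whose keys and values are both __u32 (error handling trimmed for brevity):

#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <bpf/bpf.h>

/* Drain a hash map in batches; map_fd is assumed valid. */
int drain_map(int map_fd, unsigned int max_entries)
{
    __u32 keys[64], values[64];
    __u32 batch, count;
    __u32 total = 0;
    bool first = true;
    int err;

    DECLARE_LIBBPF_OPTS(bpf_map_batch_opts, opts,
        .elem_flags = 0,
        .flags = 0,
    );

    do {
        count = 64;   /* in: buffer capacity; out: entries returned */
        err = bpf_map_lookup_and_delete_batch(map_fd,
                                              first ? NULL : &batch,
                                              &batch, keys, values,
                                              &count, &opts);
        first = false;
        total += count;
    } while (!err && total < max_entries);

    if (err && errno != ENOENT) {   /* ENOENT just means "no more entries" */
        fprintf(stderr, "batch op failed: %d\n", errno);
        return -1;
    }
    printf("drained %u entries\n", total);
    return 0;
}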
array_map_batch_ops.c
59 __u64 batch = 0; in test_array_map_batch_ops() local
87 batch = 0; in test_array_map_batch_ops()
96 total ? &batch : NULL, &batch, in test_array_map_batch_ops()
/Linux-v5.10/arch/x86/include/asm/
tlbflush.h
249 static inline void arch_tlbbatch_add_mm(struct arch_tlbflush_unmap_batch *batch, in arch_tlbbatch_add_mm() argument
253 cpumask_or(&batch->cpumask, &batch->cpumask, mm_cpumask(mm)); in arch_tlbbatch_add_mm()
256 extern void arch_tlbbatch_flush(struct arch_tlbflush_unmap_batch *batch);
/Linux-v5.10/drivers/net/ethernet/netronome/nfp/flower/
lag_conf.c
204 unsigned int member_cnt, enum nfp_fl_lag_batch *batch) in nfp_fl_lag_config_group() argument
224 if (*batch == NFP_FL_LAG_BATCH_FIRST) { in nfp_fl_lag_config_group()
227 *batch = NFP_FL_LAG_BATCH_MEMBER; in nfp_fl_lag_config_group()
233 *batch = NFP_FL_LAG_BATCH_FINISHED; in nfp_fl_lag_config_group()
239 if (*batch == NFP_FL_LAG_BATCH_FINISHED) { in nfp_fl_lag_config_group()
266 enum nfp_fl_lag_batch batch = NFP_FL_LAG_BATCH_FIRST; in nfp_fl_lag_do_work() local
288 &batch); in nfp_fl_lag_do_work()
357 active_count, &batch); in nfp_fl_lag_do_work()
371 if (batch == NFP_FL_LAG_BATCH_MEMBER) { in nfp_fl_lag_do_work()
372 batch = NFP_FL_LAG_BATCH_FINISHED; in nfp_fl_lag_do_work()
[all …]
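
nfp_fl_lag_config_group() threads an enum through successive calls so the firmware sees one correctly framed batch: the first message flips the state from FIRST to MEMBER, and the worker marks FINISHED once the last group has gone out. A sketch of that explicit batch state machine (message contents are illustrative):

#include <stdio.h>

enum batch_state {
    BATCH_FIRST,     /* nothing sent yet: next message opens the batch */
    BATCH_MEMBER,    /* batch is open: messages append to it */
    BATCH_FINISHED,  /* batch is closed: next message starts a new one */
};

/* Analogue of nfp_fl_lag_config_group(): each call adjusts the shared
 * state so the whole sequence frames one batch. */
static void config_group(int group, enum batch_state *batch)
{
    if (*batch == BATCH_FIRST) {
        printf("group %d: opens batch\n", group);
        *batch = BATCH_MEMBER;
    } else {
        printf("group %d: appended to batch\n", group);
    }
}

int main(void)
{
    enum batch_state batch = BATCH_FIRST;

    for (int group = 0; group < 3; group++)
        config_group(group, &batch);

    if (batch == BATCH_MEMBER) {     /* close the batch explicitly */
        batch = BATCH_FINISHED;
        printf("batch finished\n");
    }
    return 0;
}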
/Linux-v5.10/tools/testing/selftests/bpf/prog_tests/
bpf_tcp_ca.c
52 char batch[1500]; in server() local
68 nr_sent = send(fd, &batch, in server()
69 min(total_bytes - bytes, sizeof(batch)), 0); in server()
100 char batch[1500]; in do_test() local
161 nr_recv = recv(fd, &batch, in do_test()
162 min(total_bytes - bytes, sizeof(batch)), 0); in do_test()
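
Both bpf_tcp_ca.c helpers move total_bytes through a fixed 1500-byte buffer, clamping each send()/recv() to what remains. Since send() may transfer less than requested, the running counter must advance by its return value; a compilable sketch (socket setup omitted, fd is assumed to be a connected socket):

#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/types.h>

#define min(a, b) ((a) < (b) ? (a) : (b))

/* Send total_bytes over a connected socket fd in <= 1500-byte chunks.
 * Returns 0 on success, -1 on error. */
int send_all(int fd, size_t total_bytes)
{
    char batch[1500];
    size_t bytes = 0;

    memset(batch, 0xa5, sizeof(batch));      /* payload is irrelevant here */

    while (bytes < total_bytes) {
        ssize_t nr_sent = send(fd, batch,
                               min(total_bytes - bytes, sizeof(batch)), 0);
        if (nr_sent <= 0)
            return -1;
        bytes += (size_t)nr_sent;            /* advance by what was sent */
    }
    return 0;
}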
/Linux-v5.10/tools/virtio/ringtest/
main.c
22 int batch = 1; variable
116 int tokick = batch; in run_guest()
129 tokick = batch; in run_guest()
348 batch = c; in main()
372 if (batch > max_outstanding) in main()
373 batch = max_outstanding; in main()
/Linux-v5.10/net/core/
netclassid_cgroup.c
64 unsigned int batch; member
80 if (--ctx->batch == 0) { in update_classid_sock()
81 ctx->batch = UPDATE_CLASSID_BATCH; in update_classid_sock()
91 .batch = UPDATE_CLASSID_BATCH in update_classid_task()
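
update_classid_sock() counts sockets and, after every UPDATE_CLASSID_BATCH of them, breaks out so the iteration can reschedule rather than monopolize the CPU. A user-space analogue of that periodic-yield batching (the batch size and sched_yield() are illustrative stand-ins for the kernel's mechanism):

#include <sched.h>
#include <stdio.h>

#define UPDATE_BATCH 64          /* illustrative batch size */

struct update_ctx {
    unsigned int batch;          /* items left before the next yield */
};

static void update_item(struct update_ctx *ctx, int item)
{
    (void)item;                  /* per-item work would go here */
    if (--ctx->batch == 0) {
        ctx->batch = UPDATE_BATCH;
        sched_yield();           /* kernel reschedules/defers instead */
    }
}

int main(void)
{
    struct update_ctx ctx = { .batch = UPDATE_BATCH };

    for (int i = 0; i < 1000; i++)
        update_item(&ctx, i);
    printf("done\n");
    return 0;
}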
