Lines Matching refs:sync in tools/testing/selftests/kvm/memslot_perf_test.c

253 struct sync_area *sync; in prepare_vm() local
321 sync = (typeof(sync))vm_gpa2hva(data, MEM_SYNC_GPA, NULL); in prepare_vm()
322 atomic_init(&sync->start_flag, false); in prepare_vm()
323 atomic_init(&sync->exit_flag, false); in prepare_vm()
324 atomic_init(&sync->sync_flag, false); in prepare_vm()
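
The references above describe a small host/guest handshake area shared at a fixed guest physical address. A minimal sketch of that structure and its host-side initialization, stitched together from the accesses listed at lines 253 and 321-324; the field order and the C11-atomics spelling are inferred from this listing, and the MEM_SYNC_GPA value here is a placeholder, not the in-tree constant:

	#include <stdatomic.h>
	#include <stdbool.h>
	#include <stdint.h>

	#define MEM_SYNC_GPA 0x10000000UL	/* placeholder GPA; the real value is test-specific */

	/* Sketch only: fields inferred from the references in this listing. */
	struct sync_area {
		atomic_bool start_flag;	/* host -> guest: begin the test loop */
		atomic_bool exit_flag;	/* host -> guest: leave the test loop */
		atomic_bool sync_flag;	/* host <-> guest: per-pass rendezvous */
		void *move_area_ptr;	/* base of the area probed by the move test */
	};

	/* Host side of prepare_vm() (lines 321-324): the host reaches the
	 * guest-physical sync area through vm_gpa2hva(), then zeroes the flags. */
	static void sync_area_init(struct sync_area *sync)
	{
		atomic_init(&sync->start_flag, false);
		atomic_init(&sync->exit_flag, false);
		atomic_init(&sync->sync_flag, false);
	}
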
353 static void let_guest_run(struct sync_area *sync) in let_guest_run() argument
355 atomic_store_explicit(&sync->start_flag, true, memory_order_release); in let_guest_run()
360 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_spin_until_start() local
362 while (!atomic_load_explicit(&sync->start_flag, memory_order_acquire)) in guest_spin_until_start()
366 static void make_guest_exit(struct sync_area *sync) in make_guest_exit() argument
368 atomic_store_explicit(&sync->exit_flag, true, memory_order_release); in make_guest_exit()
373 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in _guest_should_exit() local
375 return atomic_load_explicit(&sync->exit_flag, memory_order_acquire); in _guest_should_exit()
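
Lines 353-375 stitch together into two one-way flags: the host performs a single release store, the guest pairs it with acquire loads, and neither flag is ever cleared during a run. Reconstructed whole functions, reusing struct sync_area and the placeholder MEM_SYNC_GPA from the sketch above:

	/* Host side (lines 355 and 368): release stores make everything the
	 * host wrote beforehand visible to the guest's acquire loads. */
	static void let_guest_run(struct sync_area *sync)
	{
		atomic_store_explicit(&sync->start_flag, true, memory_order_release);
	}

	static void make_guest_exit(struct sync_area *sync)
	{
		atomic_store_explicit(&sync->exit_flag, true, memory_order_release);
	}

	/* Guest side (lines 360-362): spin until the host releases us. */
	static void guest_spin_until_start(void)
	{
		struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA;

		while (!atomic_load_explicit(&sync->start_flag, memory_order_acquire))
			;
	}

	/* Guest side (lines 373-375): polled between work items. */
	static bool _guest_should_exit(void)
	{
		struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA;

		return atomic_load_explicit(&sync->exit_flag, memory_order_acquire);
	}
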
386 static noinline void host_perform_sync(struct sync_area *sync) in host_perform_sync() argument
390 atomic_store_explicit(&sync->sync_flag, true, memory_order_release); in host_perform_sync()
391 while (atomic_load_explicit(&sync->sync_flag, memory_order_acquire)) in host_perform_sync()
399 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_perform_sync() local
407 } while (!atomic_compare_exchange_weak_explicit(&sync->sync_flag, in guest_perform_sync()
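
The per-pass rendezvous at lines 386-407 is a flag flip in both directions: the host raises sync_flag and spins until it drops; the guest consumes it true-to-false with a weak compare-and-exchange. A hedged reconstruction follows. The memory orders on the guest's CAS are assumptions, and the in-tree guest loop likely also breaks out when the exit flag is raised; that check is omitted here for brevity:

	/* Host (lines 390-391): raise the flag, then wait for the guest to
	 * lower it.  The original is marked noinline, presumably to keep
	 * this spin loop out of its timed callers. */
	static void host_perform_sync(struct sync_area *sync)
	{
		atomic_store_explicit(&sync->sync_flag, true, memory_order_release);
		while (atomic_load_explicit(&sync->sync_flag, memory_order_acquire))
			;
	}

	/* Guest (lines 399-407): consume the flag.  A weak CAS may fail
	 * spuriously, and on failure it writes the observed value back into
	 * 'expected', so 'expected' must be re-armed to true on every pass. */
	static void guest_perform_sync(void)
	{
		struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA;
		bool expected;

		do {
			expected = true;
		} while (!atomic_compare_exchange_weak_explicit(&sync->sync_flag,
								&expected, false,
								memory_order_acq_rel,
								memory_order_relaxed));
	}
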
417 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_code_test_memslot_move() local
418 uintptr_t base = (typeof(base))READ_ONCE(sync->move_area_ptr); in guest_code_test_memslot_move()
445 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_code_test_memslot_map() local
474 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_code_test_memslot_unmap() local
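
Lines 417-474 show that every guest entry point opens the same way: resolve the sync area at MEM_SYNC_GPA, wait for the start flag, then loop until the exit flag. The map, unmap, and rw variants also rendezvous with the host once per pass; the move variant has no sync calls in this listing and just keeps touching its area. A hedged skeleton of that common shape, with a hypothetical name and the per-test body elided:

	static void guest_code_test_skeleton(void)	/* hypothetical name */
	{
		struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA;

		guest_spin_until_start();

		while (!_guest_should_exit()) {
			/* test-specific memory accesses go here */
			guest_perform_sync();	/* omitted by the move variant */
		}
	}
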
539 struct sync_area *sync, in test_memslot_move_prepare() argument
557 sync->move_area_ptr = (void *)movetestgpa; in test_memslot_move_prepare()
569 struct sync_area *sync, in test_memslot_move_prepare_active() argument
572 return test_memslot_move_prepare(data, sync, maxslots, true); in test_memslot_move_prepare_active()
576 struct sync_area *sync, in test_memslot_move_prepare_inactive() argument
579 return test_memslot_move_prepare(data, sync, maxslots, false); in test_memslot_move_prepare_inactive()
582 static void test_memslot_move_loop(struct vm_data *data, struct sync_area *sync) in test_memslot_move_loop() argument
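
One detail worth noting about the move test: move_area_ptr is published with a plain store during prepare (line 557) and read with READ_ONCE in the guest (line 418), with no atomics. That is still sound, because test_execute() runs prepare before let_guest_run() (lines 768 and 776), so the release/acquire pair on start_flag orders the pointer store before the guest's read. As a sketch, with READ_ONCE given its classic kernel-style definition:

	#define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

	/* Host, during test_memslot_move_prepare() (line 557): a plain store,
	 * made visible by the later release store to start_flag. */
	static void publish_move_area(struct sync_area *sync, uint64_t movetestgpa)
	{
		sync->move_area_ptr = (void *)movetestgpa;
	}

	/* Guest (line 418): runs only after guest_spin_until_start(), whose
	 * acquire load orders this read after the store above. */
	static uintptr_t consume_move_area(struct sync_area *sync)
	{
		return (uintptr_t)READ_ONCE(sync->move_area_ptr);
	}
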
633 static void test_memslot_map_loop(struct vm_data *data, struct sync_area *sync) in test_memslot_map_loop() argument
649 host_perform_sync(sync); in test_memslot_map_loop()
666 host_perform_sync(sync); in test_memslot_map_loop()
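
The two host_perform_sync() calls at lines 649 and 666 split each map-test pass into two phases, so the host only rearranges memslots while the guest is parked in guest_perform_sync(). A hedged outline of that structure; struct vm_data is left opaque, and remap_first_half()/remap_second_half() are hypothetical stand-ins for the real memslot work:

	struct vm_data;

	void remap_first_half(struct vm_data *data);	/* hypothetical helper */
	void remap_second_half(struct vm_data *data);	/* hypothetical helper */

	static void map_pass(struct vm_data *data, struct sync_area *sync)
	{
		/* Phase 1: host reworks one half while the guest touches the other. */
		remap_first_half(data);
		host_perform_sync(sync);		/* line 649 */

		/* Phase 2: roles swap for the other half of the test area. */
		remap_second_half(data);
		host_perform_sync(sync);		/* line 666 */
	}
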
674 struct sync_area *sync, in test_memslot_unmap_loop_common() argument
686 host_perform_sync(sync); in test_memslot_unmap_loop_common()
692 host_perform_sync(sync); in test_memslot_unmap_loop_common()
701 struct sync_area *sync) in test_memslot_unmap_loop() argument
703 test_memslot_unmap_loop_common(data, sync, 1); in test_memslot_unmap_loop()
707 struct sync_area *sync) in test_memslot_unmap_loop_chunked() argument
709 test_memslot_unmap_loop_common(data, sync, MEM_TEST_UNMAP_CHUNK_PAGES); in test_memslot_unmap_loop_chunked()
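
Lines 674-709 share one loop parameterized by chunk size: test_memslot_unmap_loop() drops a page at a time, the _chunked variant MEM_TEST_UNMAP_CHUNK_PAGES at once, with a rendezvous on each side of the pass (lines 686 and 692). A hedged sketch of that parameterization; unmap_test_pages() is a hypothetical stand-in for the actual unmapping call:

	void unmap_test_pages(struct vm_data *data, uint64_t first_page,
			      uint64_t npages);		/* hypothetical helper */

	static void unmap_pass(struct vm_data *data, struct sync_area *sync,
			       uint64_t chunk_pages, uint64_t total_pages)
	{
		uint64_t done;

		host_perform_sync(sync);		/* line 686: phase boundary */

		for (done = 0; done < total_pages; done += chunk_pages) {
			uint64_t n = total_pages - done;

			if (n > chunk_pages)
				n = chunk_pages;
			unmap_test_pages(data, done, n);
		}

		host_perform_sync(sync);		/* line 692: end-of-pass rendezvous */
	}
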
712 static void test_memslot_rw_loop(struct vm_data *data, struct sync_area *sync) in test_memslot_rw_loop() argument
720 host_perform_sync(sync); in test_memslot_rw_loop()
733 host_perform_sync(sync); in test_memslot_rw_loop()
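
The rw loop also brackets its work with two rendezvous (lines 720 and 733), but here the host is an active participant: between syncs it writes its own pattern into the test area and checks what the guest wrote in the previous pass. A loose sketch under assumed names; MEM_TEST_VAL_2 is a placeholder patterned on the listing's naming style, and the vm_gpa2hva() signature is assumed from its use at line 321:

	#define MEM_TEST_VAL_2 0x99UL	/* placeholder marker value */

	void *vm_gpa2hva(struct vm_data *data, uint64_t gpa, uint64_t *rpage);

	static void rw_pass(struct vm_data *data, struct sync_area *sync,
			    uint64_t gpa, uint64_t npages, uint64_t page_size)
	{
		uint64_t i;

		/* Host writes its marker while the guest writes elsewhere. */
		for (i = 0; i < npages; i++)
			*(uint64_t *)vm_gpa2hva(data, gpa + i * page_size, NULL) =
				MEM_TEST_VAL_2;
		host_perform_sync(sync);		/* line 720 */

		/* Check the guest's marker from the last pass, then reset. */
		for (i = 0; i < npages; i++) {
			uint64_t *vptr = vm_gpa2hva(data, gpa + i * page_size, NULL);

			/* the real harness would TEST_ASSERT on *vptr here */
			*vptr = 0;
		}
		host_perform_sync(sync);		/* line 733 */
	}
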
740 bool (*prepare)(struct vm_data *data, struct sync_area *sync,
742 void (*loop)(struct vm_data *data, struct sync_area *sync);
754 struct sync_area *sync; in test_execute() local
765 sync = (typeof(sync))vm_gpa2hva(data, MEM_SYNC_GPA, NULL); in test_execute()
768 !tdata->prepare(data, sync, maxslots)) { in test_execute()
776 let_guest_run(sync); in test_execute()
783 tdata->loop(data, sync); in test_execute()
788 make_guest_exit(sync); in test_execute()
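
Finally, lines 740-788 give the driver's shape: each test supplies an optional prepare callback and a required loop callback (lines 740-742), and test_execute() wires them to the handshake in order: resolve the sync area, prepare, release the guest, run the loop, raise the exit flag. A condensed skeleton built on the earlier sketches; the prepare callback's third parameter type and the loop bound are assumptions (the in-tree loop is time-bounded, a plain pass count stands in here):

	struct test_data {	/* reduced to the two callbacks at lines 740-742 */
		bool (*prepare)(struct vm_data *data, struct sync_area *sync,
				uint64_t *maxslots);
		void (*loop)(struct vm_data *data, struct sync_area *sync);
	};

	static void test_execute(struct vm_data *data, const struct test_data *tdata,
				 uint64_t *maxslots, uint64_t passes)
	{
		struct sync_area *sync;

		sync = (typeof(sync))vm_gpa2hva(data, MEM_SYNC_GPA, NULL);	/* line 765 */

		/* prepare may veto the run, as the bail-out at line 768 shows */
		if (tdata->prepare && !tdata->prepare(data, sync, maxslots))
			goto exit_guest;

		let_guest_run(sync);			/* line 776 */

		while (passes--)
			tdata->loop(data, sync);	/* line 783 */

	exit_guest:
		make_guest_exit(sync);			/* line 788 */
	}
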