Lines Matching refs:sync

255 struct sync_area *sync; in prepare_vm() local
323 sync = (typeof(sync))vm_gpa2hva(data, MEM_SYNC_GPA, NULL); in prepare_vm()
324 atomic_init(&sync->start_flag, false); in prepare_vm()
325 atomic_init(&sync->exit_flag, false); in prepare_vm()
326 atomic_init(&sync->sync_flag, false); in prepare_vm()
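Taken together, the three atomic_init() calls above imply a shared synchronization area with at least three flags, plus the move_area_ptr referenced later in the listing. A minimal sketch of that layout, assuming C11 atomics and treating any extra fields of the real struct as omitted:

	#include <stdatomic.h>
	#include <stdbool.h>

	/* Sketch of the shared area; the real struct may carry more fields. */
	struct sync_area {
		atomic_bool start_flag;	/* host -> guest: begin the test body */
		atomic_bool exit_flag;	/* host -> guest: leave the test loop */
		atomic_bool sync_flag;	/* host <-> guest rendezvous point    */
		void *move_area_ptr;	/* base of the area under test        */
	};

	static void sync_area_init(struct sync_area *sync)
	{
		/* Mirrors the prepare_vm() initialization at lines 324-326. */
		atomic_init(&sync->start_flag, false);
		atomic_init(&sync->exit_flag, false);
		atomic_init(&sync->sync_flag, false);
	}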
355 static void let_guest_run(struct sync_area *sync) in let_guest_run() argument
357 atomic_store_explicit(&sync->start_flag, true, memory_order_release); in let_guest_run()
362 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_spin_until_start() local
364 while (!atomic_load_explicit(&sync->start_flag, memory_order_acquire)) in guest_spin_until_start()
368 static void make_guest_exit(struct sync_area *sync) in make_guest_exit() argument
370 atomic_store_explicit(&sync->exit_flag, true, memory_order_release); in make_guest_exit()
375 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in _guest_should_exit() local
377 return atomic_load_explicit(&sync->exit_flag, memory_order_acquire); in _guest_should_exit()
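start_flag and exit_flag both follow the same one-way pattern: the host publishes with a release store and the guest observes with an acquire load, so everything the host wrote before raising the flag is visible to the guest afterward. A self-contained sketch of that pairing (cpu_relax() is a stand-in for whatever pause primitive the harness uses while spinning):

	#include <stdatomic.h>
	#include <stdbool.h>

	static inline void cpu_relax(void)
	{
		/* Placeholder; e.g. __builtin_ia32_pause() on x86. */
	}

	/* Host side: raise the flag after all prior writes (release). */
	static void flag_raise(atomic_bool *flag)
	{
		atomic_store_explicit(flag, true, memory_order_release);
	}

	/* Guest side: the acquire load pairs with the release store above,
	 * so the host's earlier writes are visible once the loop exits. */
	static void flag_wait(atomic_bool *flag)
	{
		while (!atomic_load_explicit(flag, memory_order_acquire))
			cpu_relax();
	}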
388 static noinline void host_perform_sync(struct sync_area *sync) in host_perform_sync() argument
392 atomic_store_explicit(&sync->sync_flag, true, memory_order_release); in host_perform_sync()
393 while (atomic_load_explicit(&sync->sync_flag, memory_order_acquire)) in host_perform_sync()
401 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_perform_sync() local
409 } while (!atomic_compare_exchange_weak_explicit(&sync->sync_flag, in guest_perform_sync()
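Unlike the one-way flags, sync_flag implements a two-sided rendezvous: the host raises the flag and then spins until it drops, while the guest lowers it with a compare-and-swap. Because a weak CAS can fail spuriously, the guest re-arms its expected value on every retry, matching the loop shape visible at line 409. A sketch under those assumptions (the exact memory orders in the real test may differ):

	#include <stdatomic.h>
	#include <stdbool.h>

	/* Host: raise sync_flag, then wait for the guest to knock it down. */
	static void host_rendezvous(atomic_bool *sync_flag)
	{
		atomic_store_explicit(sync_flag, true, memory_order_release);
		while (atomic_load_explicit(sync_flag, memory_order_acquire))
			;	/* spin until the guest acknowledges */
	}

	/* Guest: swap true -> false exactly once; "expected" is re-armed
	 * on each iteration because the weak CAS may fail spuriously. */
	static void guest_rendezvous(atomic_bool *sync_flag)
	{
		bool expected;

		do {
			expected = true;
		} while (!atomic_compare_exchange_weak_explicit(sync_flag,
								&expected, false,
								memory_order_acq_rel,
								memory_order_relaxed));
	}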
419 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_code_test_memslot_move() local
420 uintptr_t base = (typeof(base))READ_ONCE(sync->move_area_ptr); in guest_code_test_memslot_move()
447 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_code_test_memslot_map() local
476 struct sync_area *sync = (typeof(sync))MEM_SYNC_GPA; in guest_code_test_memslot_unmap() local
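Note the asymmetry in how the two sides reach the shared page: guest code casts the guest physical address MEM_SYNC_GPA directly to a pointer (lines 362/375/401/419/447/476), which works because the test identity-maps guest memory, while the host translates the same GPA through vm_gpa2hva() (lines 323/767). An illustrative sketch; the MEM_SYNC_GPA value and the vm_gpa2hva() prototype below are assumptions, not the real definitions:

	#include <stdint.h>

	#define MEM_SYNC_GPA 0x100000UL	/* placeholder, not the real constant */

	struct sync_area;
	struct vm_data;

	/* Assumed prototype, after the calls at lines 323 and 767. */
	void *vm_gpa2hva(struct vm_data *data, uint64_t gpa, uint64_t *rempages);

	/* Guest: guest memory is identity-mapped, so the GPA is a pointer. */
	static struct sync_area *guest_sync_area(void)
	{
		return (struct sync_area *)MEM_SYNC_GPA;
	}

	/* Host: the same GPA must be translated to a host virtual address. */
	static struct sync_area *host_sync_area(struct vm_data *data)
	{
		return (struct sync_area *)vm_gpa2hva(data, MEM_SYNC_GPA, NULL);
	}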
541 struct sync_area *sync, in test_memslot_move_prepare() argument
559 sync->move_area_ptr = (void *)movetestgpa; in test_memslot_move_prepare()
571 struct sync_area *sync, in test_memslot_move_prepare_active() argument
574 return test_memslot_move_prepare(data, sync, maxslots, true); in test_memslot_move_prepare_active()
578 struct sync_area *sync, in test_memslot_move_prepare_inactive() argument
581 return test_memslot_move_prepare(data, sync, maxslots, false); in test_memslot_move_prepare_inactive()
584 static void test_memslot_move_loop(struct vm_data *data, struct sync_area *sync) in test_memslot_move_loop() argument
635 static void test_memslot_map_loop(struct vm_data *data, struct sync_area *sync) in test_memslot_map_loop() argument
651 host_perform_sync(sync); in test_memslot_map_loop()
668 host_perform_sync(sync); in test_memslot_map_loop()
676 struct sync_area *sync, in test_memslot_unmap_loop_common() argument
688 host_perform_sync(sync); in test_memslot_unmap_loop_common()
694 host_perform_sync(sync); in test_memslot_unmap_loop_common()
703 struct sync_area *sync) in test_memslot_unmap_loop() argument
705 test_memslot_unmap_loop_common(data, sync, 1); in test_memslot_unmap_loop()
709 struct sync_area *sync) in test_memslot_unmap_loop_chunked() argument
711 test_memslot_unmap_loop_common(data, sync, MEM_TEST_UNMAP_CHUNK_PAGES); in test_memslot_unmap_loop_chunked()
714 static void test_memslot_rw_loop(struct vm_data *data, struct sync_area *sync) in test_memslot_rw_loop() argument
722 host_perform_sync(sync); in test_memslot_rw_loop()
735 host_perform_sync(sync); in test_memslot_rw_loop()
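Each host-side loop body above brackets its memslot operation between two host_perform_sync() calls (e.g. lines 651/668, 688/694, 722/735), ordering every slot mutation against the guest's access phases. A sketch of that common shape, with do_memslot_operation() as a hypothetical stand-in for the move/map/unmap under test:

	struct vm_data;
	struct sync_area;

	void host_perform_sync(struct sync_area *sync);	/* from the listing */
	void do_memslot_operation(struct vm_data *data);	/* hypothetical */

	static void test_loop_iteration(struct vm_data *data,
					struct sync_area *sync)
	{
		/* Meet the guest at the end of its first access phase. */
		host_perform_sync(sync);

		do_memslot_operation(data);	/* the operation under test */

		/* Meet the guest again at the end of its second phase. */
		host_perform_sync(sync);
	}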
742 bool (*prepare)(struct vm_data *data, struct sync_area *sync,
744 void (*loop)(struct vm_data *data, struct sync_area *sync);
756 struct sync_area *sync; in test_execute() local
767 sync = (typeof(sync))vm_gpa2hva(data, MEM_SYNC_GPA, NULL); in test_execute()
770 !tdata->prepare(data, sync, maxslots)) { in test_execute()
778 let_guest_run(sync); in test_execute()
785 tdata->loop(data, sync); in test_execute()
790 make_guest_exit(sync); in test_execute()
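The test_execute() fragments show the overall harness flow: resolve the sync area from MEM_SYNC_GPA, run the per-test prepare hook, start the guest, drive the measured loop, then ask the guest to exit. A condensed sketch of that flow; the vCPU-thread and timing plumbing of the real test is omitted, and test_should_continue() plus the MEM_SYNC_GPA value are placeholders:

	#include <stdbool.h>
	#include <stdint.h>

	struct vm_data;
	struct sync_area;

	struct test_data {
		bool (*prepare)(struct vm_data *data, struct sync_area *sync,
				uint64_t maxslots);
		void (*loop)(struct vm_data *data, struct sync_area *sync);
	};

	void *vm_gpa2hva(struct vm_data *data, uint64_t gpa, uint64_t *rempages);
	void let_guest_run(struct sync_area *sync);
	void make_guest_exit(struct sync_area *sync);
	bool test_should_continue(void);	/* hypothetical budget check */

	#define MEM_SYNC_GPA 0x100000UL		/* placeholder value */

	static bool run_test(struct vm_data *data, const struct test_data *tdata,
			     uint64_t maxslots)
	{
		struct sync_area *sync =
			(struct sync_area *)vm_gpa2hva(data, MEM_SYNC_GPA, NULL);

		/* A failed prepare hook aborts this test configuration. */
		if (tdata->prepare && !tdata->prepare(data, sync, maxslots))
			return false;

		let_guest_run(sync);
		while (test_should_continue())
			tdata->loop(data, sync);
		make_guest_exit(sync);
		return true;
	}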