Lines matching refs:kvm_vcpu (each entry is prefixed with its line number in the source header)
417 unsigned long (*get_guest_pgd)(struct kvm_vcpu *vcpu);
418 u64 (*get_pdptr)(struct kvm_vcpu *vcpu, int index);
419 int (*page_fault)(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa, u32 err,
421 void (*inject_page_fault)(struct kvm_vcpu *vcpu,
423 gpa_t (*gva_to_gpa)(struct kvm_vcpu *vcpu, gpa_t gva_or_gpa,
425 gpa_t (*translate_gpa)(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access,
427 int (*sync_page)(struct kvm_vcpu *vcpu,
429 void (*invlpg)(struct kvm_vcpu *vcpu, gva_t gva, hpa_t root_hpa);
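The callbacks at source lines 417-429 above are the per-vCPU paging hooks kept in struct kvm_mmu. As a rough illustration only, assuming the kvm_host.h context these lines come from, a caller translates a guest virtual address through the active walk MMU much like the kvm_mmu_gva_to_gpa_*() helpers listed further down; the helper name below is hypothetical and the UNMAPPED_GVA sentinel is an assumption about this tree.

/* Hypothetical sketch: translate a guest virtual address via the active MMU.
 * Mirrors the kvm_mmu_gva_to_gpa_*() helpers; error handling is simplified. */
static gpa_t example_gva_to_gpa(struct kvm_vcpu *vcpu, gva_t gva, u32 access)
{
	struct x86_exception fault;
	gpa_t gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, &fault);

	if (gpa == UNMAPPED_GVA)
		return UNMAPPED_GVA;	/* fault describes the failed walk */
	return gpa;
}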
486 struct kvm_vcpu *vcpu;
576 struct kvm_vcpu *vcpu;
740 int (*complete_userspace_io)(struct kvm_vcpu *vcpu);
1305 void (*vcpu_after_set_cpuid)(struct kvm_vcpu *vcpu);
1312 int (*vcpu_create)(struct kvm_vcpu *vcpu);
1313 void (*vcpu_free)(struct kvm_vcpu *vcpu);
1314 void (*vcpu_reset)(struct kvm_vcpu *vcpu, bool init_event);
1316 void (*prepare_guest_switch)(struct kvm_vcpu *vcpu);
1317 void (*vcpu_load)(struct kvm_vcpu *vcpu, int cpu);
1318 void (*vcpu_put)(struct kvm_vcpu *vcpu);
1320 void (*update_exception_bitmap)(struct kvm_vcpu *vcpu);
1321 int (*get_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr);
1322 int (*set_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr);
1323 u64 (*get_segment_base)(struct kvm_vcpu *vcpu, int seg);
1324 void (*get_segment)(struct kvm_vcpu *vcpu,
1326 int (*get_cpl)(struct kvm_vcpu *vcpu);
1327 void (*set_segment)(struct kvm_vcpu *vcpu,
1329 void (*get_cs_db_l_bits)(struct kvm_vcpu *vcpu, int *db, int *l);
1330 void (*set_cr0)(struct kvm_vcpu *vcpu, unsigned long cr0);
1331 bool (*is_valid_cr4)(struct kvm_vcpu *vcpu, unsigned long cr0);
1332 void (*set_cr4)(struct kvm_vcpu *vcpu, unsigned long cr4);
1333 int (*set_efer)(struct kvm_vcpu *vcpu, u64 efer);
1334 void (*get_idt)(struct kvm_vcpu *vcpu, struct desc_ptr *dt);
1335 void (*set_idt)(struct kvm_vcpu *vcpu, struct desc_ptr *dt);
1336 void (*get_gdt)(struct kvm_vcpu *vcpu, struct desc_ptr *dt);
1337 void (*set_gdt)(struct kvm_vcpu *vcpu, struct desc_ptr *dt);
1338 void (*sync_dirty_debug_regs)(struct kvm_vcpu *vcpu);
1339 void (*set_dr7)(struct kvm_vcpu *vcpu, unsigned long value);
1340 void (*cache_reg)(struct kvm_vcpu *vcpu, enum kvm_reg reg);
1341 unsigned long (*get_rflags)(struct kvm_vcpu *vcpu);
1342 void (*set_rflags)(struct kvm_vcpu *vcpu, unsigned long rflags);
1344 void (*tlb_flush_all)(struct kvm_vcpu *vcpu);
1345 void (*tlb_flush_current)(struct kvm_vcpu *vcpu);
1356 void (*tlb_flush_gva)(struct kvm_vcpu *vcpu, gva_t addr);
1362 void (*tlb_flush_guest)(struct kvm_vcpu *vcpu);
1364 enum exit_fastpath_completion (*run)(struct kvm_vcpu *vcpu);
1365 int (*handle_exit)(struct kvm_vcpu *vcpu,
1367 int (*skip_emulated_instruction)(struct kvm_vcpu *vcpu);
1368 void (*update_emulated_instruction)(struct kvm_vcpu *vcpu);
1369 void (*set_interrupt_shadow)(struct kvm_vcpu *vcpu, int mask);
1370 u32 (*get_interrupt_shadow)(struct kvm_vcpu *vcpu);
1371 void (*patch_hypercall)(struct kvm_vcpu *vcpu,
1373 void (*set_irq)(struct kvm_vcpu *vcpu);
1374 void (*set_nmi)(struct kvm_vcpu *vcpu);
1375 void (*queue_exception)(struct kvm_vcpu *vcpu);
1376 void (*cancel_injection)(struct kvm_vcpu *vcpu);
1377 int (*interrupt_allowed)(struct kvm_vcpu *vcpu, bool for_injection);
1378 int (*nmi_allowed)(struct kvm_vcpu *vcpu, bool for_injection);
1379 bool (*get_nmi_mask)(struct kvm_vcpu *vcpu);
1380 void (*set_nmi_mask)(struct kvm_vcpu *vcpu, bool masked);
1381 void (*enable_nmi_window)(struct kvm_vcpu *vcpu);
1382 void (*enable_irq_window)(struct kvm_vcpu *vcpu);
1383 void (*update_cr8_intercept)(struct kvm_vcpu *vcpu, int tpr, int irr);
1385 void (*refresh_apicv_exec_ctrl)(struct kvm_vcpu *vcpu);
1386 void (*hwapic_irr_update)(struct kvm_vcpu *vcpu, int max_irr);
1387 void (*hwapic_isr_update)(struct kvm_vcpu *vcpu, int isr);
1388 bool (*guest_apic_has_interrupt)(struct kvm_vcpu *vcpu);
1389 void (*load_eoi_exitmap)(struct kvm_vcpu *vcpu, u64 *eoi_exit_bitmap);
1390 void (*set_virtual_apic_mode)(struct kvm_vcpu *vcpu);
1391 void (*set_apic_access_page_addr)(struct kvm_vcpu *vcpu);
1392 int (*deliver_posted_interrupt)(struct kvm_vcpu *vcpu, int vector);
1393 int (*sync_pir_to_irr)(struct kvm_vcpu *vcpu);
1396 u64 (*get_mt_mask)(struct kvm_vcpu *vcpu, gfn_t gfn, bool is_mmio);
1398 void (*load_mmu_pgd)(struct kvm_vcpu *vcpu, hpa_t root_hpa,
1403 u64 (*get_l2_tsc_offset)(struct kvm_vcpu *vcpu);
1404 u64 (*get_l2_tsc_multiplier)(struct kvm_vcpu *vcpu);
1405 void (*write_tsc_offset)(struct kvm_vcpu *vcpu, u64 offset);
1406 void (*write_tsc_multiplier)(struct kvm_vcpu *vcpu, u64 multiplier);
1412 void (*get_exit_info)(struct kvm_vcpu *vcpu, u64 *info1, u64 *info2,
1415 int (*check_intercept)(struct kvm_vcpu *vcpu,
1419 void (*handle_exit_irqoff)(struct kvm_vcpu *vcpu);
1421 void (*request_immediate_exit)(struct kvm_vcpu *vcpu);
1423 void (*sched_in)(struct kvm_vcpu *kvm, int cpu);
1430 void (*update_cpu_dirty_logging)(struct kvm_vcpu *vcpu);
1445 int (*pre_block)(struct kvm_vcpu *vcpu);
1446 void (*post_block)(struct kvm_vcpu *vcpu);
1448 void (*vcpu_blocking)(struct kvm_vcpu *vcpu);
1449 void (*vcpu_unblocking)(struct kvm_vcpu *vcpu);
1454 void (*apicv_post_state_restore)(struct kvm_vcpu *vcpu);
1455 bool (*dy_apicv_has_pending_interrupt)(struct kvm_vcpu *vcpu);
1457 int (*set_hv_timer)(struct kvm_vcpu *vcpu, u64 guest_deadline_tsc,
1459 void (*cancel_hv_timer)(struct kvm_vcpu *vcpu);
1461 void (*setup_mce)(struct kvm_vcpu *vcpu);
1463 int (*smi_allowed)(struct kvm_vcpu *vcpu, bool for_injection);
1464 int (*enter_smm)(struct kvm_vcpu *vcpu, char *smstate);
1465 int (*leave_smm)(struct kvm_vcpu *vcpu, const char *smstate);
1466 void (*enable_smi_window)(struct kvm_vcpu *vcpu);
1475 bool (*can_emulate_instruction)(struct kvm_vcpu *vcpu, void *insn, int insn_len);
1477 bool (*apic_init_signal_blocked)(struct kvm_vcpu *vcpu);
1478 int (*enable_direct_tlbflush)(struct kvm_vcpu *vcpu);
1480 void (*migrate_timers)(struct kvm_vcpu *vcpu);
1481 void (*msr_filter_changed)(struct kvm_vcpu *vcpu);
1482 int (*complete_emulated_msr)(struct kvm_vcpu *vcpu, int err);
1484 void (*vcpu_deliver_sipi_vector)(struct kvm_vcpu *vcpu, u8 vector);
1488 int (*check_events)(struct kvm_vcpu *vcpu);
1489 bool (*hv_timer_pending)(struct kvm_vcpu *vcpu);
1490 void (*triple_fault)(struct kvm_vcpu *vcpu);
1491 int (*get_state)(struct kvm_vcpu *vcpu,
1494 int (*set_state)(struct kvm_vcpu *vcpu,
1497 bool (*get_nested_state_pages)(struct kvm_vcpu *vcpu);
1498 int (*write_log_dirty)(struct kvm_vcpu *vcpu, gpa_t l2_gpa);
1500 int (*enable_evmcs)(struct kvm_vcpu *vcpu,
1502 uint16_t (*get_evmcs_version)(struct kvm_vcpu *vcpu);
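Source lines 1305-1502 above are the function-pointer members of the kvm_x86_ops vendor table (and, from check_events onward, the nested-virtualization kvm_x86_nested_ops table). Vendor modules supply their own implementations for these hooks; the sketch below uses hypothetical myvendor_* names purely to show the wiring pattern, while the real tables live in the VMX and SVM code.

/* Hypothetical sketch of how a vendor module populates a few of the
 * kvm_x86_ops callbacks listed above. Only the shape matters here. */
static int myvendor_vcpu_create(struct kvm_vcpu *vcpu)
{
	/* allocate and initialize vendor-specific vCPU state */
	return 0;
}

static void myvendor_vcpu_free(struct kvm_vcpu *vcpu)
{
	/* release whatever vcpu_create set up */
}

static struct kvm_x86_ops myvendor_x86_ops __initdata = {
	.vcpu_create	= myvendor_vcpu_create,
	.vcpu_free	= myvendor_vcpu_free,
	/* the remaining hooks listed above are assigned the same way */
};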
1560 void kvm_mmu_destroy(struct kvm_vcpu *vcpu);
1561 int kvm_mmu_create(struct kvm_vcpu *vcpu);
1565 void kvm_mmu_after_set_cpuid(struct kvm_vcpu *vcpu);
1566 void kvm_mmu_reset_context(struct kvm_vcpu *vcpu);
1579 int load_pdptrs(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, unsigned long cr3);
1581 int emulator_write_phys(struct kvm_vcpu *vcpu, gpa_t gpa,
1599 u64 vcpu_tsc_khz(struct kvm_vcpu *vcpu);
1658 int kvm_emulate_instruction(struct kvm_vcpu *vcpu, int emulation_type);
1659 int kvm_emulate_instruction_from_buffer(struct kvm_vcpu *vcpu,
1663 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer);
1664 int __kvm_get_msr(struct kvm_vcpu *vcpu, u32 index, u64 *data, bool host_initiated);
1665 int kvm_get_msr(struct kvm_vcpu *vcpu, u32 index, u64 *data);
1666 int kvm_set_msr(struct kvm_vcpu *vcpu, u32 index, u64 data);
1667 int kvm_emulate_rdmsr(struct kvm_vcpu *vcpu);
1668 int kvm_emulate_wrmsr(struct kvm_vcpu *vcpu);
1669 int kvm_emulate_as_nop(struct kvm_vcpu *vcpu);
1670 int kvm_emulate_invd(struct kvm_vcpu *vcpu);
1671 int kvm_emulate_mwait(struct kvm_vcpu *vcpu);
1672 int kvm_handle_invalid_op(struct kvm_vcpu *vcpu);
1673 int kvm_emulate_monitor(struct kvm_vcpu *vcpu);
1675 int kvm_fast_pio(struct kvm_vcpu *vcpu, int size, unsigned short port, int in);
1676 int kvm_emulate_cpuid(struct kvm_vcpu *vcpu);
1677 int kvm_emulate_halt(struct kvm_vcpu *vcpu);
1678 int kvm_vcpu_halt(struct kvm_vcpu *vcpu);
1679 int kvm_emulate_ap_reset_hold(struct kvm_vcpu *vcpu);
1680 int kvm_emulate_wbinvd(struct kvm_vcpu *vcpu);
1682 void kvm_get_segment(struct kvm_vcpu *vcpu, struct kvm_segment *var, int seg);
1683 int kvm_load_segment_descriptor(struct kvm_vcpu *vcpu, u16 selector, int seg);
1684 void kvm_vcpu_deliver_sipi_vector(struct kvm_vcpu *vcpu, u8 vector);
1686 int kvm_task_switch(struct kvm_vcpu *vcpu, u16 tss_selector, int idt_index,
1689 void kvm_free_guest_fpu(struct kvm_vcpu *vcpu);
1691 void kvm_post_set_cr0(struct kvm_vcpu *vcpu, unsigned long old_cr0, unsigned long cr0);
1692 void kvm_post_set_cr4(struct kvm_vcpu *vcpu, unsigned long old_cr4, unsigned long cr4);
1693 int kvm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0);
1694 int kvm_set_cr3(struct kvm_vcpu *vcpu, unsigned long cr3);
1695 int kvm_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4);
1696 int kvm_set_cr8(struct kvm_vcpu *vcpu, unsigned long cr8);
1697 int kvm_set_dr(struct kvm_vcpu *vcpu, int dr, unsigned long val);
1698 void kvm_get_dr(struct kvm_vcpu *vcpu, int dr, unsigned long *val);
1699 unsigned long kvm_get_cr8(struct kvm_vcpu *vcpu);
1700 void kvm_lmsw(struct kvm_vcpu *vcpu, unsigned long msw);
1701 void kvm_get_cs_db_l_bits(struct kvm_vcpu *vcpu, int *db, int *l);
1702 int kvm_emulate_xsetbv(struct kvm_vcpu *vcpu);
1704 int kvm_get_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr);
1705 int kvm_set_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr);
1707 unsigned long kvm_get_rflags(struct kvm_vcpu *vcpu);
1708 void kvm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags);
1709 int kvm_emulate_rdpmc(struct kvm_vcpu *vcpu);
1711 void kvm_queue_exception(struct kvm_vcpu *vcpu, unsigned nr);
1712 void kvm_queue_exception_e(struct kvm_vcpu *vcpu, unsigned nr, u32 error_code);
1713 void kvm_queue_exception_p(struct kvm_vcpu *vcpu, unsigned nr, unsigned long payload);
1714 void kvm_requeue_exception(struct kvm_vcpu *vcpu, unsigned nr);
1715 void kvm_requeue_exception_e(struct kvm_vcpu *vcpu, unsigned nr, u32 error_code);
1716 void kvm_inject_page_fault(struct kvm_vcpu *vcpu, struct x86_exception *fault);
1717 bool kvm_inject_emulated_page_fault(struct kvm_vcpu *vcpu,
1719 int kvm_read_guest_page_mmu(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu,
1722 bool kvm_require_cpl(struct kvm_vcpu *vcpu, int required_cpl);
1723 bool kvm_require_dr(struct kvm_vcpu *vcpu, int dr);
1744 void kvm_inject_nmi(struct kvm_vcpu *vcpu);
1746 void kvm_update_dr7(struct kvm_vcpu *vcpu);
1749 void __kvm_mmu_free_some_pages(struct kvm_vcpu *vcpu);
1750 void kvm_mmu_free_roots(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu,
1752 void kvm_mmu_free_guest_mode_roots(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu);
1753 gpa_t translate_nested_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access,
1755 gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva,
1757 gpa_t kvm_mmu_gva_to_gpa_fetch(struct kvm_vcpu *vcpu, gva_t gva,
1759 gpa_t kvm_mmu_gva_to_gpa_write(struct kvm_vcpu *vcpu, gva_t gva,
1761 gpa_t kvm_mmu_gva_to_gpa_system(struct kvm_vcpu *vcpu, gva_t gva,
1765 void kvm_vcpu_update_apicv(struct kvm_vcpu *vcpu);
1772 int kvm_emulate_hypercall(struct kvm_vcpu *vcpu);
1774 int kvm_mmu_page_fault(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa, u64 error_code,
1776 void kvm_mmu_invlpg(struct kvm_vcpu *vcpu, gva_t gva);
1777 void kvm_mmu_invalidate_gva(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu,
1779 void kvm_mmu_invpcid_gva(struct kvm_vcpu *vcpu, gva_t gva, unsigned long pcid);
1780 void kvm_mmu_new_pgd(struct kvm_vcpu *vcpu, gpa_t new_pgd);
1807 static inline void kvm_inject_gp(struct kvm_vcpu *vcpu, u32 error_code)
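The matcher shows only the declaration line of kvm_inject_gp(); in upstream KVM its body is a one-liner that forwards to kvm_queue_exception_e() (source line 1712 above) with the #GP vector, roughly:

static inline void kvm_inject_gp(struct kvm_vcpu *vcpu, u32 error_code)
{
	kvm_queue_exception_e(vcpu, GP_VECTOR, error_code);
}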
1841 int kvm_cpu_has_injectable_intr(struct kvm_vcpu *v);
1842 int kvm_cpu_has_interrupt(struct kvm_vcpu *vcpu);
1843 int kvm_cpu_has_extint(struct kvm_vcpu *v);
1844 int kvm_arch_interrupt_allowed(struct kvm_vcpu *vcpu);
1845 int kvm_cpu_get_interrupt(struct kvm_vcpu *v);
1846 void kvm_vcpu_reset(struct kvm_vcpu *vcpu, bool init_event);
1847 void kvm_vcpu_reload_apic_access_page(struct kvm_vcpu *vcpu);
1862 u64 kvm_scale_tsc(struct kvm_vcpu *vcpu, u64 tsc, u64 ratio);
1863 u64 kvm_read_l1_tsc(struct kvm_vcpu *vcpu, u64 host_tsc);
1867 unsigned long kvm_get_linear_rip(struct kvm_vcpu *vcpu);
1868 bool kvm_is_linear_rip(struct kvm_vcpu *vcpu, unsigned long linear_rip);
1875 bool kvm_arch_async_page_not_present(struct kvm_vcpu *vcpu,
1877 void kvm_arch_async_page_present(struct kvm_vcpu *vcpu,
1879 void kvm_arch_async_page_ready(struct kvm_vcpu *vcpu,
1881 void kvm_arch_async_page_present_queued(struct kvm_vcpu *vcpu);
1882 bool kvm_arch_can_dequeue_async_page_present(struct kvm_vcpu *vcpu);
1883 extern bool kvm_find_async_pf_gfn(struct kvm_vcpu *vcpu, gfn_t gfn);
1885 int kvm_skip_emulated_instruction(struct kvm_vcpu *vcpu);
1886 int kvm_complete_insn_gp(struct kvm_vcpu *vcpu, int err);
1887 void __kvm_request_immediate_exit(struct kvm_vcpu *vcpu);
1893 bool kvm_vcpu_is_reset_bsp(struct kvm_vcpu *vcpu);
1894 bool kvm_vcpu_is_bsp(struct kvm_vcpu *vcpu);
1897 struct kvm_vcpu **dest_vcpu);
1909 static inline void kvm_arch_vcpu_blocking(struct kvm_vcpu *vcpu)
1914 static inline void kvm_arch_vcpu_unblocking(struct kvm_vcpu *vcpu)
1919 static inline void kvm_arch_vcpu_block_finish(struct kvm_vcpu *vcpu) {}
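Many of the prototypes listed above combine into a standard exit-handler pattern: do the emulation work, then let kvm_complete_insn_gp() (source line 1886) either queue #GP(0) via kvm_inject_gp() or advance the guest RIP via kvm_skip_emulated_instruction(). A hedged sketch with a hypothetical handler name:

/* Hypothetical exit handler showing the usual completion pattern. */
static int myvendor_handle_example_exit(struct kvm_vcpu *vcpu)
{
	int err;

	err = 0;	/* ... perform the actual emulation work ... */

	/*
	 * On error this queues #GP(0) and resumes the guest; on success it
	 * skips the emulated instruction (which may itself exit to userspace).
	 */
	return kvm_complete_insn_gp(vcpu, err);
}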