Lines Matching +full:high +full:- +full:vt
20 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
56 #define STEALCLOCK_NOT_AVAILABLE (-1)
102 /* only for little-endian */
117 return eax != (uint32_t)-1 && ebx == VMWARE_HYPERVISOR_MAGIC; in __vmware_platform()
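
The check at line 117 treats the platform as VMware when the hypercall did not fail (EAX != (uint32_t)-1) and EBX carries VMWARE_HYPERVISOR_MAGIC. As a standalone illustration of guest-side detection, here is a minimal user-space sketch that uses the generic CPUID path instead of the hypercall: it tests the hypervisor-present bit (CPUID.1:ECX[31]) and then reads the vendor signature at leaf 0x40000000, which VMware reports as "VMwareVMware". This is an illustrative sketch, not the kernel's __vmware_platform() implementation.

#include <stdio.h>
#include <string.h>
#include <cpuid.h>

int main(void)
{
	unsigned int eax, ebx, ecx, edx;
	char sig[13];

	/* CPUID.1:ECX bit 31 is the "hypervisor present" bit. */
	__cpuid(1, eax, ebx, ecx, edx);
	if (!(ecx & (1u << 31)))
		return 1;	/* bare metal, no hypervisor leaves */

	/* Leaf 0x40000000: hypervisor vendor signature in EBX:ECX:EDX. */
	__cpuid(0x40000000, eax, ebx, ecx, edx);
	memcpy(sig + 0, &ebx, 4);
	memcpy(sig + 4, &ecx, 4);
	memcpy(sig + 8, &edx, 4);
	sig[12] = '\0';

	printf("hypervisor signature: %s\n", sig);
	return strcmp(sig, "VMwareVMware") ? 1 : 0;
}
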
137 early_param("no-vmw-sched-clock", setup_vmw_sched_clock);
144 early_param("no-steal-acc", parse_no_stealacc);
152 ns -= vmware_cyc2ns.cyc2ns_offset; in vmware_sched_clock()
161 clocks_calc_mult_shift(&d->cyc2ns_mul, &d->cyc2ns_shift, in vmware_cyc2ns_setup()
163 d->cyc2ns_offset = mul_u64_u32_shr(tsc_now, d->cyc2ns_mul, in vmware_cyc2ns_setup()
164 d->cyc2ns_shift); in vmware_cyc2ns_setup()
166 pr_info("using clock offset of %llu ns\n", d->cyc2ns_offset); in vmware_cyc2ns_setup()
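
Lines 161-166 precompute a fixed-point (cyc2ns_mul, cyc2ns_shift) pair so that nanoseconds can be derived from TSC cycles as (cycles * mul) >> shift, and convert the current TSC reading into cyc2ns_offset; vmware_sched_clock() at line 152 then subtracts that offset so the clock starts near zero. The sketch below reproduces the idea with hypothetical helpers standing in for clocks_calc_mult_shift() and mul_u64_u32_shr(); the TSC frequency and sample values are assumed for illustration.

#include <stdio.h>
#include <stdint.h>

/*
 * Simplified stand-in for clocks_calc_mult_shift(): pick the largest
 * shift (up to 32) for which mul = (1e6 << shift) / tsc_khz still fits
 * in 32 bits, so that ns = (cycles * mul) >> shift keeps good precision.
 * (tsc_khz is the number of cycles per millisecond; 1 ms = 1e6 ns.)
 */
static void calc_mult_shift(uint32_t tsc_khz, uint32_t *mul, uint32_t *shift)
{
	uint64_t tmp = 0;
	uint32_t s;

	for (s = 32; s > 0; s--) {
		tmp = ((uint64_t)1000000 << s) / tsc_khz;
		if (tmp <= 0xffffffffULL)
			break;
	}
	*mul = (uint32_t)tmp;
	*shift = s;
}

/* Stand-in for mul_u64_u32_shr(): 64x32 multiply, then right shift. */
static uint64_t cycles_to_ns(uint64_t cycles, uint32_t mul, uint32_t shift)
{
	return (uint64_t)(((unsigned __int128)cycles * mul) >> shift);
}

int main(void)
{
	uint32_t mul, shift;
	uint64_t tsc_at_boot = 123456789ULL;	/* assumed sample */
	uint64_t tsc_now     = 2523456789ULL;	/* assumed sample */
	uint64_t offset, ns;

	calc_mult_shift(2400000, &mul, &shift);	/* assumed 2.4 GHz TSC */
	offset = cycles_to_ns(tsc_at_boot, mul, shift);
	ns = cycles_to_ns(tsc_now, mul, shift) - offset;

	printf("mul=%u shift=%u sched_clock=%llu ns\n",
	       mul, shift, (unsigned long long)ns);
	return 0;
}
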
208 * vmware_steal_clock() - read the per-cpu steal clock
211 * The function reads the steal clock if we are on a 64-bit system, otherwise
212 * reads it in parts, checking that the high part didn't change in the
224 clock = READ_ONCE(steal->clock); in vmware_steal_clock()
226 uint32_t initial_high, low, high; in vmware_steal_clock() local
229 initial_high = READ_ONCE(steal->clock_high); in vmware_steal_clock()
230 /* Do not reorder initial_high and low readings */ in vmware_steal_clock()
232 low = READ_ONCE(steal->clock_low); in vmware_steal_clock()
235 high = READ_ONCE(steal->clock_high); in vmware_steal_clock()
236 } while (initial_high != high); in vmware_steal_clock()
238 clock = ((uint64_t)high << 32) | low; in vmware_steal_clock()
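
The do/while at lines 226-238 is the standard way to sample a 64-bit counter on a 32-bit guest that can only load 32 bits atomically: read the high half, then the low half, then the high half again, and retry if the high half changed, so the combined value is never a torn mix of two different updates. Below is a minimal user-space sketch of the same pattern, using C11 acquire loads where the kernel uses READ_ONCE() plus smp_rmb(); the struct and field names are illustrative, not the VMware steal-time layout.

#include <stdint.h>
#include <stdatomic.h>

/* Illustrative layout: a 64-bit counter published as two 32-bit halves. */
struct split_counter {
	_Atomic uint32_t clock_low;
	_Atomic uint32_t clock_high;
};

uint64_t read_split_counter(struct split_counter *c)
{
	uint32_t initial_high, low, high;

	do {
		initial_high = atomic_load_explicit(&c->clock_high,
						    memory_order_acquire);
		/* Acquire ordering keeps the low read after initial_high. */
		low = atomic_load_explicit(&c->clock_low,
					   memory_order_acquire);
		/* ...and the second high read after low. */
		high = atomic_load_explicit(&c->clock_high,
					    memory_order_acquire);
	} while (initial_high != high);

	return ((uint64_t)high << 32) | low;
}
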
258 pr_info("vmware-stealtime: cpu %d, pa %llx\n", in vmware_register_steal_time()
474 /* Checks if hypervisor supports x2apic without VT-D interrupt remapping. */
488 ghcb_set_rip(ghcb, regs->ip); in vmware_sev_es_hcall_prepare()
489 ghcb_set_rbx(ghcb, regs->bx); in vmware_sev_es_hcall_prepare()
490 ghcb_set_rcx(ghcb, regs->cx); in vmware_sev_es_hcall_prepare()
491 ghcb_set_rdx(ghcb, regs->dx); in vmware_sev_es_hcall_prepare()
492 ghcb_set_rsi(ghcb, regs->si); in vmware_sev_es_hcall_prepare()
493 ghcb_set_rdi(ghcb, regs->di); in vmware_sev_es_hcall_prepare()
494 ghcb_set_rbp(ghcb, regs->bp); in vmware_sev_es_hcall_prepare()
507 regs->bx = ghcb_get_rbx(ghcb); in vmware_sev_es_hcall_finish()
508 regs->cx = ghcb_get_rcx(ghcb); in vmware_sev_es_hcall_finish()
509 regs->dx = ghcb_get_rdx(ghcb); in vmware_sev_es_hcall_finish()
510 regs->si = ghcb_get_rsi(ghcb); in vmware_sev_es_hcall_finish()
511 regs->di = ghcb_get_rdi(ghcb); in vmware_sev_es_hcall_finish()
512 regs->bp = ghcb_get_rbp(ghcb); in vmware_sev_es_hcall_finish()
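
Under SEV-ES the guest's register state is encrypted and not directly readable by the hypervisor, so lines 488-494 copy the hypercall inputs into the shared GHCB page before the VMGEXIT, and lines 507-512 copy the results back into pt_regs afterwards (the real ghcb_set_*()/ghcb_get_*() helpers also maintain a valid-field bitmap, which is omitted here). A bare sketch of that marshal/unmarshal shape, with made-up types rather than the real GHCB layout:

#include <stdint.h>

/* Illustrative register file and shared-page layout, not the real GHCB. */
struct cpu_regs    { uint64_t ip, bx, cx, dx, si, di, bp; };
struct shared_page { uint64_t rip, rbx, rcx, rdx, rsi, rdi, rbp; };

/* Before exiting to the hypervisor: publish the hypercall inputs. */
void hcall_prepare(struct shared_page *g, const struct cpu_regs *r)
{
	g->rip = r->ip;
	g->rbx = r->bx;
	g->rcx = r->cx;
	g->rdx = r->dx;
	g->rsi = r->si;
	g->rdi = r->di;
	g->rbp = r->bp;
}

/* After the hypervisor returns: pull the results back into vCPU state. */
void hcall_finish(struct cpu_regs *r, const struct shared_page *g)
{
	r->bx = g->rbx;
	r->cx = g->rcx;
	r->dx = g->rdx;
	r->si = g->rsi;
	r->di = g->rdi;
	r->bp = g->rbp;
}
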