/Linux-v4.19/arch/powerpc/kvm/ |
D | booke_emulate.c |
    37  vcpu->arch.regs.nip = vcpu->arch.shared->srr0;  in kvmppc_emul_rfi()
    38  kvmppc_set_msr(vcpu, vcpu->arch.shared->srr1);  in kvmppc_emul_rfi()
    43  vcpu->arch.regs.nip = vcpu->arch.dsrr0;  in kvmppc_emul_rfdi()
    44  kvmppc_set_msr(vcpu, vcpu->arch.dsrr1);  in kvmppc_emul_rfdi()
    49  vcpu->arch.regs.nip = vcpu->arch.csrr0;  in kvmppc_emul_rfci()
    50  kvmppc_set_msr(vcpu, vcpu->arch.csrr1);  in kvmppc_emul_rfci()
    91  kvmppc_set_gpr(vcpu, rt, vcpu->arch.shared->msr);  in kvmppc_booke_emulate_op()
    101 vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE)  in kvmppc_booke_emulate_op()
    107 vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE)  in kvmppc_booke_emulate_op()
    138 vcpu->arch.shared->dar = spr_val;  in kvmppc_booke_emulate_mtspr()
    [all …]
|
D | book3s_hv_tm.c |
    20  u64 msr = vcpu->arch.shregs.msr;  in emulate_tx_failure()
    22  tfiar = vcpu->arch.regs.nip & ~0x3ull;  in emulate_tx_failure()
    24  if (MSR_TM_SUSPENDED(vcpu->arch.shregs.msr))  in emulate_tx_failure()
    30  vcpu->arch.tfiar = tfiar;  in emulate_tx_failure()
    32  vcpu->arch.texasr = (vcpu->arch.texasr & 0x3ffffff) | texasr;  in emulate_tx_failure()
    45  u32 instr = vcpu->arch.emul_inst;  in kvmhv_p9_tm_emulation()
    46  u64 msr = vcpu->arch.shregs.msr;  in kvmhv_p9_tm_emulation()
    53  newmsr = vcpu->arch.shregs.srr1;  in kvmhv_p9_tm_emulation()
    59  vcpu->arch.shregs.msr = newmsr;  in kvmhv_p9_tm_emulation()
    60  vcpu->arch.cfar = vcpu->arch.regs.nip - 4;  in kvmhv_p9_tm_emulation()
    [all …]
|
D | timing.c |
    38  mutex_lock(&vcpu->arch.exit_timing_lock);  in kvmppc_init_timing_stats()
    40  vcpu->arch.last_exit_type = 0xDEAD;  in kvmppc_init_timing_stats()
    42  vcpu->arch.timing_count_type[i] = 0;  in kvmppc_init_timing_stats()
    43  vcpu->arch.timing_max_duration[i] = 0;  in kvmppc_init_timing_stats()
    44  vcpu->arch.timing_min_duration[i] = 0xFFFFFFFF;  in kvmppc_init_timing_stats()
    45  vcpu->arch.timing_sum_duration[i] = 0;  in kvmppc_init_timing_stats()
    46  vcpu->arch.timing_sum_quad_duration[i] = 0;  in kvmppc_init_timing_stats()
    48  vcpu->arch.timing_last_exit = 0;  in kvmppc_init_timing_stats()
    49  vcpu->arch.timing_exit.tv64 = 0;  in kvmppc_init_timing_stats()
    50  vcpu->arch.timing_last_enter.tv64 = 0;  in kvmppc_init_timing_stats()
    [all …]
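
The timing.c hits above are the reset path of KVM's per-exit-type statistics: each exit type keeps a count, minimum, maximum, sum, and sum of squared durations, from which mean and variance can be derived when the stats are dumped. Below is a minimal user-space sketch of the same accumulator pattern; the struct, function names, and the main() driver are illustrative, not the kernel code.

/* exit_timing.c - sketch of the accumulator pattern behind
 * kvmppc_init_timing_stats(): per-type count, min, max, sum and
 * sum of squares (the latter allows variance = sum_quad/n - mean^2). */
#include <stdint.h>
#include <stdio.h>

struct exit_timing {
	uint64_t count;
	uint64_t min;       /* initialised to "infinity" */
	uint64_t max;
	uint64_t sum;
	uint64_t sum_quad;  /* sum of squared durations */
};

static void timing_reset(struct exit_timing *t)
{
	t->count = 0;
	t->min = UINT64_MAX;   /* analogous to 0xFFFFFFFF in the 32-bit code */
	t->max = 0;
	t->sum = 0;
	t->sum_quad = 0;
}

static void timing_add(struct exit_timing *t, uint64_t duration)
{
	t->count++;
	if (duration < t->min)
		t->min = duration;
	if (duration > t->max)
		t->max = duration;
	t->sum += duration;
	t->sum_quad += duration * duration;
}

int main(void)
{
	struct exit_timing t;

	timing_reset(&t);
	timing_add(&t, 120);
	timing_add(&t, 80);
	printf("n=%llu min=%llu max=%llu mean=%llu\n",
	       (unsigned long long)t.count, (unsigned long long)t.min,
	       (unsigned long long)t.max,
	       (unsigned long long)(t.sum / t.count));
	return 0;
}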
|
D | emulate_loadstore.c |
    109 vcpu->arch.mmio_vsx_copy_nums = 0;  in kvmppc_emulate_loadstore()
    110 vcpu->arch.mmio_vsx_offset = 0;  in kvmppc_emulate_loadstore()
    111 vcpu->arch.mmio_copy_type = KVMPPC_VSX_COPY_NONE;  in kvmppc_emulate_loadstore()
    112 vcpu->arch.mmio_sp64_extend = 0;  in kvmppc_emulate_loadstore()
    113 vcpu->arch.mmio_sign_extend = 0;  in kvmppc_emulate_loadstore()
    114 vcpu->arch.mmio_vmx_copy_nums = 0;  in kvmppc_emulate_loadstore()
    115 vcpu->arch.mmio_vmx_offset = 0;  in kvmppc_emulate_loadstore()
    116 vcpu->arch.mmio_host_swabbed = 0;  in kvmppc_emulate_loadstore()
    119 vcpu->arch.regs.msr = vcpu->arch.shared->msr;  in kvmppc_emulate_loadstore()
    120 vcpu->arch.regs.ccr = vcpu->arch.cr;  in kvmppc_emulate_loadstore()
    [all …]
|
D | booke.c |
    80  printk("pc: %08lx msr: %08llx\n", vcpu->arch.regs.nip,  in kvmppc_dump_vcpu()
    81  vcpu->arch.shared->msr);  in kvmppc_dump_vcpu()
    82  printk("lr: %08lx ctr: %08lx\n", vcpu->arch.regs.link,  in kvmppc_dump_vcpu()
    83  vcpu->arch.regs.ctr);  in kvmppc_dump_vcpu()
    84  printk("srr0: %08llx srr1: %08llx\n", vcpu->arch.shared->srr0,  in kvmppc_dump_vcpu()
    85  vcpu->arch.shared->srr1);  in kvmppc_dump_vcpu()
    87  printk("exceptions: %08lx\n", vcpu->arch.pending_exceptions);  in kvmppc_dump_vcpu()
    105 vcpu->arch.shadow_msr &= ~MSR_SPE;  in kvmppc_vcpu_disable_spe()
    115 vcpu->arch.shadow_msr |= MSR_SPE;  in kvmppc_vcpu_enable_spe()
    121 if (vcpu->arch.shared->msr & MSR_SPE) {  in kvmppc_vcpu_sync_spe()
    [all …]
|
D | book3s_hv_tm_builtin.c |
    25  u32 instr = vcpu->arch.emul_inst;  in kvmhv_p9_tm_emulation_early()
    32  newmsr = vcpu->arch.shregs.srr1;  in kvmhv_p9_tm_emulation_early()
    37  vcpu->arch.shregs.msr = newmsr;  in kvmhv_p9_tm_emulation_early()
    38  vcpu->arch.cfar = vcpu->arch.regs.nip - 4;  in kvmhv_p9_tm_emulation_early()
    39  vcpu->arch.regs.nip = vcpu->arch.shregs.srr0;  in kvmhv_p9_tm_emulation_early()
    44  msr = vcpu->arch.shregs.msr;  in kvmhv_p9_tm_emulation_early()
    45  if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))  in kvmhv_p9_tm_emulation_early()
    48  if (!(vcpu->arch.hfscr & HFSCR_EBB) ||  in kvmhv_p9_tm_emulation_early()
    60  vcpu->arch.shregs.msr = msr;  in kvmhv_p9_tm_emulation_early()
    61  vcpu->arch.cfar = vcpu->arch.regs.nip - 4;  in kvmhv_p9_tm_emulation_early()
    [all …]
|
D | book3s_hv.c |
    223 cpu = READ_ONCE(vcpu->arch.thread_cpu);  in kvmppc_fast_vcpu_kick_hv()
    289 struct kvmppc_vcore *vc = vcpu->arch.vcore;  in kvmppc_core_vcpu_load_hv()
    301 spin_lock_irqsave(&vcpu->arch.tbacct_lock, flags);  in kvmppc_core_vcpu_load_hv()
    302 if (vcpu->arch.state == KVMPPC_VCPU_BUSY_IN_HOST &&  in kvmppc_core_vcpu_load_hv()
    303 vcpu->arch.busy_preempt != TB_NIL) {  in kvmppc_core_vcpu_load_hv()
    304 vcpu->arch.busy_stolen += mftb() - vcpu->arch.busy_preempt;  in kvmppc_core_vcpu_load_hv()
    305 vcpu->arch.busy_preempt = TB_NIL;  in kvmppc_core_vcpu_load_hv()
    307 spin_unlock_irqrestore(&vcpu->arch.tbacct_lock, flags);  in kvmppc_core_vcpu_load_hv()
    312 struct kvmppc_vcore *vc = vcpu->arch.vcore;  in kvmppc_core_vcpu_put_hv()
    318 spin_lock_irqsave(&vcpu->arch.tbacct_lock, flags);  in kvmppc_core_vcpu_put_hv()
    [all …]
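
The book3s_hv.c matches show the busy/stolen-time bookkeeping done when an HV vcpu is preempted and later reloaded: a timebase timestamp is recorded at preemption, and the elapsed time is added to busy_stolen on the next load. Below is a hedged user-space sketch of that pattern only; it uses CLOCK_MONOTONIC in place of mftb() and omits the tbacct_lock and the KVMPPC_VCPU_BUSY_IN_HOST state check that the real code performs. All names are illustrative.

/* stolen_time.c - sketch of the preempt/reload stolen-time accounting
 * pattern seen in kvmppc_core_vcpu_load_hv()/put_hv(). */
#include <stdint.h>
#include <stdio.h>
#include <time.h>

#define TB_NIL ((uint64_t)-1)

struct vcpu_acct {
	uint64_t busy_preempt;  /* timestamp taken at preemption, or TB_NIL */
	uint64_t busy_stolen;   /* accumulated stolen time, in ns here */
};

static uint64_t now_ns(void)
{
	struct timespec ts;

	clock_gettime(CLOCK_MONOTONIC, &ts);
	return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
}

static void vcpu_preempted(struct vcpu_acct *a)
{
	a->busy_preempt = now_ns();            /* like busy_preempt = mftb() */
}

static void vcpu_loaded(struct vcpu_acct *a)
{
	if (a->busy_preempt != TB_NIL) {       /* was preempted while busy */
		a->busy_stolen += now_ns() - a->busy_preempt;
		a->busy_preempt = TB_NIL;
	}
}

int main(void)
{
	struct vcpu_acct a = { .busy_preempt = TB_NIL };

	vcpu_preempted(&a);
	vcpu_loaded(&a);
	printf("stolen: %llu ns\n", (unsigned long long)a.busy_stolen);
	return 0;
}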
|
D | book3s_emulate.c |
    88  if (vcpu->arch.papr_enabled && (level > PRIV_SUPER))  in spr_allowed()
    101 memcpy(&vcpu->arch.gpr_tm[0], &vcpu->arch.regs.gpr[0],  in kvmppc_copyto_vcpu_tm()
    102 sizeof(vcpu->arch.gpr_tm));  in kvmppc_copyto_vcpu_tm()
    103 memcpy(&vcpu->arch.fp_tm, &vcpu->arch.fp,  in kvmppc_copyto_vcpu_tm()
    105 memcpy(&vcpu->arch.vr_tm, &vcpu->arch.vr,  in kvmppc_copyto_vcpu_tm()
    107 vcpu->arch.ppr_tm = vcpu->arch.ppr;  in kvmppc_copyto_vcpu_tm()
    108 vcpu->arch.dscr_tm = vcpu->arch.dscr;  in kvmppc_copyto_vcpu_tm()
    109 vcpu->arch.amr_tm = vcpu->arch.amr;  in kvmppc_copyto_vcpu_tm()
    110 vcpu->arch.ctr_tm = vcpu->arch.regs.ctr;  in kvmppc_copyto_vcpu_tm()
    111 vcpu->arch.tar_tm = vcpu->arch.tar;  in kvmppc_copyto_vcpu_tm()
    [all …]
|
D | e500_emulate.c |
    56  ulong param = vcpu->arch.regs.gpr[rb];  in kvmppc_e500_emul_msgclr()
    62  clear_bit(prio, &vcpu->arch.pending_exceptions);  in kvmppc_e500_emul_msgclr()
    68  ulong param = vcpu->arch.regs.gpr[rb];  in kvmppc_e500_emul_msgsnd()
    78  int cpir = cvcpu->arch.shared->pir;  in kvmppc_e500_emul_msgsnd()
    80  set_bit(prio, &cvcpu->arch.pending_exceptions);  in kvmppc_e500_emul_msgsnd()
    97  run->debug.arch.address = vcpu->arch.regs.nip;  in kvmppc_e500_emul_ehpriv()
    98  run->debug.arch.status = 0;  in kvmppc_e500_emul_ehpriv()
    229 vcpu->arch.shared->mas0 = spr_val;  in kvmppc_core_emulate_mtspr_e500()
    232 vcpu->arch.shared->mas1 = spr_val;  in kvmppc_core_emulate_mtspr_e500()
    235 vcpu->arch.shared->mas2 = spr_val;  in kvmppc_core_emulate_mtspr_e500()
    [all …]
|
D | book3s_pr.c |
    85  if (vcpu->arch.hflags & BOOK3S_HFLAG_SPLIT_HACK)  in kvmppc_fixup_split_real()
    92  vcpu->arch.hflags |= BOOK3S_HFLAG_SPLIT_HACK;  in kvmppc_fixup_split_real()
    115 current->thread.kvm_shadow_vcpu = vcpu->arch.shadow_vcpu;  in kvmppc_core_vcpu_load_pr()
    156 svcpu->gpr[0] = vcpu->arch.regs.gpr[0];  in kvmppc_copy_to_svcpu()
    157 svcpu->gpr[1] = vcpu->arch.regs.gpr[1];  in kvmppc_copy_to_svcpu()
    158 svcpu->gpr[2] = vcpu->arch.regs.gpr[2];  in kvmppc_copy_to_svcpu()
    159 svcpu->gpr[3] = vcpu->arch.regs.gpr[3];  in kvmppc_copy_to_svcpu()
    160 svcpu->gpr[4] = vcpu->arch.regs.gpr[4];  in kvmppc_copy_to_svcpu()
    161 svcpu->gpr[5] = vcpu->arch.regs.gpr[5];  in kvmppc_copy_to_svcpu()
    162 svcpu->gpr[6] = vcpu->arch.regs.gpr[6];  in kvmppc_copy_to_svcpu()
    [all …]
|
D | e500mc.c |
    105 vcpu->arch.pid = pid;  in kvmppc_set_pid()
    122 mtspr(SPRN_EPCR, vcpu->arch.shadow_epcr);  in kvmppc_core_vcpu_load_e500mc()
    124 mtspr(SPRN_MSRP, vcpu->arch.shadow_msrp);  in kvmppc_core_vcpu_load_e500mc()
    125 vcpu->arch.eplc = EPC_EGS | (get_lpid(vcpu) << EPC_ELPID_SHIFT);  in kvmppc_core_vcpu_load_e500mc()
    126 vcpu->arch.epsc = vcpu->arch.eplc;  in kvmppc_core_vcpu_load_e500mc()
    127 mtspr(SPRN_EPLC, vcpu->arch.eplc);  in kvmppc_core_vcpu_load_e500mc()
    128 mtspr(SPRN_EPSC, vcpu->arch.epsc);  in kvmppc_core_vcpu_load_e500mc()
    130 mtspr(SPRN_GIVPR, vcpu->arch.ivpr);  in kvmppc_core_vcpu_load_e500mc()
    131 mtspr(SPRN_GIVOR2, vcpu->arch.ivor[BOOKE_IRQPRIO_DATA_STORAGE]);  in kvmppc_core_vcpu_load_e500mc()
    132 mtspr(SPRN_GIVOR8, vcpu->arch.ivor[BOOKE_IRQPRIO_SYSCALL]);  in kvmppc_core_vcpu_load_e500mc()
    [all …]
|
D | e500_mmu.c |
    74  esel += gtlb0_set_base(vcpu_e500, vcpu->arch.shared->mas2);  in get_tlb_esel()
    137 tlbsel = (vcpu->arch.shared->mas4 >> 28) & 0x1;  in kvmppc_e500_deliver_tlb_miss()
    139 tsized = (vcpu->arch.shared->mas4 >> 7) & 0x1f;  in kvmppc_e500_deliver_tlb_miss()
    141 vcpu->arch.shared->mas0 = MAS0_TLBSEL(tlbsel) | MAS0_ESEL(victim)  in kvmppc_e500_deliver_tlb_miss()
    143 vcpu->arch.shared->mas1 = MAS1_VALID | (as ? MAS1_TS : 0)  in kvmppc_e500_deliver_tlb_miss()
    146 vcpu->arch.shared->mas2 = (eaddr & MAS2_EPN)  in kvmppc_e500_deliver_tlb_miss()
    147 | (vcpu->arch.shared->mas4 & MAS2_ATTRIB_MASK);  in kvmppc_e500_deliver_tlb_miss()
    148 vcpu->arch.shared->mas7_3 &= MAS3_U0 | MAS3_U1 | MAS3_U2 | MAS3_U3;  in kvmppc_e500_deliver_tlb_miss()
    149 vcpu->arch.shared->mas6 = (vcpu->arch.shared->mas6 & MAS6_SPID1)  in kvmppc_e500_deliver_tlb_miss()
    335 vcpu->arch.shared->mas0 &= ~MAS0_NV(~0);  in kvmppc_e500_emul_tlbre()
    [all …]
|
/Linux-v4.19/arch/s390/kvm/ |
D | guestdbg.c |
    62  u64 *cr9 = &vcpu->arch.sie_block->gcr[9];  in enable_all_hw_bp()
    63  u64 *cr10 = &vcpu->arch.sie_block->gcr[10];  in enable_all_hw_bp()
    64  u64 *cr11 = &vcpu->arch.sie_block->gcr[11];  in enable_all_hw_bp()
    67  if (vcpu->arch.guestdbg.nr_hw_bp <= 0 ||  in enable_all_hw_bp()
    68  vcpu->arch.guestdbg.hw_bp_info == NULL)  in enable_all_hw_bp()
    79  for (i = 0; i < vcpu->arch.guestdbg.nr_hw_bp; i++) {  in enable_all_hw_bp()
    80  start = vcpu->arch.guestdbg.hw_bp_info[i].addr;  in enable_all_hw_bp()
    81  len = vcpu->arch.guestdbg.hw_bp_info[i].len;  in enable_all_hw_bp()
    102 u64 *cr9 = &vcpu->arch.sie_block->gcr[9];  in enable_all_hw_wp()
    103 u64 *cr10 = &vcpu->arch.sie_block->gcr[10];  in enable_all_hw_wp()
    [all …]
|
D | kvm-s390.c |
    265 kvm_clock_sync_scb(vcpu->arch.sie_block, *delta);  in kvm_clock_sync()
    267 kvm->arch.epoch = vcpu->arch.sie_block->epoch;  in kvm_clock_sync()
    268 kvm->arch.epdx = vcpu->arch.sie_block->epdx;  in kvm_clock_sync()
    270 if (vcpu->arch.cputm_enabled)  in kvm_clock_sync()
    271 vcpu->arch.cputm_start += *delta;  in kvm_clock_sync()
    272 if (vcpu->arch.vsie_block)  in kvm_clock_sync()
    273 kvm_clock_sync_scb(vcpu->arch.vsie_block,  in kvm_clock_sync()
    528 struct gmap *gmap = kvm->arch.gmap;  in kvm_s390_sync_dirty_log()
    619 kvm->arch.use_irqchip = 1;  in kvm_vm_ioctl_enable_cap()
    624 kvm->arch.user_sigp = 1;  in kvm_vm_ioctl_enable_cap()
    [all …]
|
/Linux-v4.19/arch/mips/kvm/ |
D | emulate.c |
    46  struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_compute_return_epc() local
    66  arch->gprs[insn.r_format.rd] = epc + 8;  in kvm_compute_return_epc()
    69  nextpc = arch->gprs[insn.r_format.rs];  in kvm_compute_return_epc()
    85  if ((long)arch->gprs[insn.i_format.rs] < 0)  in kvm_compute_return_epc()
    94  if ((long)arch->gprs[insn.i_format.rs] >= 0)  in kvm_compute_return_epc()
    103 arch->gprs[31] = epc + 8;  in kvm_compute_return_epc()
    104 if ((long)arch->gprs[insn.i_format.rs] < 0)  in kvm_compute_return_epc()
    113 arch->gprs[31] = epc + 8;  in kvm_compute_return_epc()
    114 if ((long)arch->gprs[insn.i_format.rs] >= 0)  in kvm_compute_return_epc()
    142 arch->gprs[31] = instpc + 8;  in kvm_compute_return_epc()
    [all …]
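
These emulate.c lines come from kvm_compute_return_epc(), which works out where a guest branch resumes on a delay-slot architecture: register-indirect jumps take the target from rs, and branch-and-link forms write epc + 8 (the instruction after the delay slot) into GPR 31. The sketch below only shows that underlying arithmetic for standard MIPS branch encoding; it is an illustration, not the kernel implementation, and the function names are invented.

/* mips_branch.c - simplified delay-slot branch target and link address
 * computation, in the spirit of kvm_compute_return_epc(). */
#include <stdint.h>
#include <stdio.h>

/* For a taken conditional branch at 'epc': the 16-bit immediate is a
 * signed word offset relative to the delay slot (epc + 4). */
static uint64_t branch_target(uint64_t epc, int16_t imm)
{
	return epc + 4 + ((int64_t)imm << 2);
}

/* Branch-and-link variants (e.g. bltzal) store the address of the
 * instruction following the delay slot into GPR 31. */
static uint64_t link_address(uint64_t epc)
{
	return epc + 8;
}

int main(void)
{
	uint64_t epc = 0x80001000;

	printf("target: 0x%llx, ra: 0x%llx\n",
	       (unsigned long long)branch_target(epc, -2),
	       (unsigned long long)link_address(epc));
	return 0;
}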
|
/Linux-v4.19/arch/powerpc/include/asm/ |
D | kvm_book3s_64.h |
    48  return kvm->arch.radix;  in kvm_is_radix()
    422 if (atomic_read(&kvm->arch.hpte_mod_interest))  in note_hpte_modification()
    485 vcpu->arch.cr = vcpu->arch.cr_tm;  in copy_from_checkpoint()
    486 vcpu->arch.regs.xer = vcpu->arch.xer_tm;  in copy_from_checkpoint()
    487 vcpu->arch.regs.link = vcpu->arch.lr_tm;  in copy_from_checkpoint()
    488 vcpu->arch.regs.ctr = vcpu->arch.ctr_tm;  in copy_from_checkpoint()
    489 vcpu->arch.amr = vcpu->arch.amr_tm;  in copy_from_checkpoint()
    490 vcpu->arch.ppr = vcpu->arch.ppr_tm;  in copy_from_checkpoint()
    491 vcpu->arch.dscr = vcpu->arch.dscr_tm;  in copy_from_checkpoint()
    492 vcpu->arch.tar = vcpu->arch.tar_tm;  in copy_from_checkpoint()
    [all …]
|
/Linux-v4.19/scripts/ |
D | checkstack.pl |
    40  my $arch = shift;
    41  if ($arch eq "") {
    42  $arch = `uname -m`;
    43  chomp($arch);
    49  if ($arch eq 'aarch64') {
    52  } elsif ($arch eq 'arm') {
    55  } elsif ($arch =~ /^x86(_64)?$/ || $arch =~ /^i[3456]86$/) {
    61  } elsif ($arch eq 'ia64') {
    64  } elsif ($arch eq 'm68k') {
    68  } elsif ($arch eq 'mips64') {
    [all …]
|
/Linux-v4.19/arch/powerpc/kernel/ |
D | asm-offsets.c |
    426 OFFSET(VCPU_HOST_STACK, kvm_vcpu, arch.host_stack);  in main()
    427 OFFSET(VCPU_HOST_PID, kvm_vcpu, arch.host_pid);  in main()
    428 OFFSET(VCPU_GUEST_PID, kvm_vcpu, arch.pid);  in main()
    429 OFFSET(VCPU_GPRS, kvm_vcpu, arch.regs.gpr);  in main()
    430 OFFSET(VCPU_VRSAVE, kvm_vcpu, arch.vrsave);  in main()
    431 OFFSET(VCPU_FPRS, kvm_vcpu, arch.fp.fpr);  in main()
    433 OFFSET(VCPU_VRS, kvm_vcpu, arch.vr.vr);  in main()
    435 OFFSET(VCPU_XER, kvm_vcpu, arch.regs.xer);  in main()
    436 OFFSET(VCPU_CTR, kvm_vcpu, arch.regs.ctr);  in main()
    437 OFFSET(VCPU_LR, kvm_vcpu, arch.regs.link);  in main()
    [all …]
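
asm-offsets.c exists so that assembly code can reach fields of C structures such as kvm_vcpu: each OFFSET()/DEFINE() invocation embeds an "->NAME value" marker into the compiler's assembly output, and the build later turns those markers into #define constants in a generated header. Below is a self-contained sketch of the mechanism with a toy structure; compile with "gcc -S toy_offsets.c" and grep for "->" in the resulting .s file. The toy names are assumptions; only the shape of DEFINE/OFFSET mirrors the kernel's include/linux/kbuild.h.

/* toy_offsets.c - minimal sketch of the asm-offsets technique. */
#include <stddef.h>

struct toy_vcpu {
	unsigned long nip;
	unsigned long gpr[32];
};

/* Emit a marker line into the generated assembly; a build script
 * (e.g. sed) later rewrites each "->NAME value" marker into
 * "#define NAME value" in an auto-generated header. */
#define DEFINE(sym, val) \
	asm volatile("\n.ascii \"->" #sym " %0 " #val "\"" : : "i" (val))
#define OFFSET(sym, str, mem)	DEFINE(sym, offsetof(struct str, mem))

int main(void)
{
	OFFSET(VCPU_NIP,  toy_vcpu, nip);
	OFFSET(VCPU_GPRS, toy_vcpu, gpr);
	DEFINE(VCPU_SIZE, sizeof(struct toy_vcpu));
	return 0;
}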
|
/Linux-v4.19/arch/arm64/kvm/ |
D | debug.c |
    51  vcpu->arch.guest_debug_preserved.mdscr_el1 = val;  in save_guest_debug_regs()
    54  vcpu->arch.guest_debug_preserved.mdscr_el1);  in save_guest_debug_regs()
    59  u64 val = vcpu->arch.guest_debug_preserved.mdscr_el1;  in restore_guest_debug_regs()
    88  vcpu->arch.debug_ptr = &vcpu->arch.vcpu_debug_state;  in kvm_arm_reset_debug_ptr()
    114 bool trap_debug = !(vcpu->arch.flags & KVM_ARM64_DEBUG_DIRTY);  in kvm_arm_setup_debug()
    123 vcpu->arch.mdcr_el2 = __this_cpu_read(mdcr_el2) & MDCR_EL2_HPMN_MASK;  in kvm_arm_setup_debug()
    124 vcpu->arch.mdcr_el2 |= (MDCR_EL2_TPM |  in kvm_arm_setup_debug()
    133 vcpu->arch.mdcr_el2 |= MDCR_EL2_TDE;  in kvm_arm_setup_debug()
    186 vcpu->arch.debug_ptr = &vcpu->arch.external_debug_state;  in kvm_arm_setup_debug()
    187 vcpu->arch.flags |= KVM_ARM64_DEBUG_DIRTY;  in kvm_arm_setup_debug()
    [all …]
|
/Linux-v4.19/arch/ia64/ |
D | Makefile |
    25  KBUILD_LDFLAGS_MODULE += -T $(srctree)/arch/ia64/module.lds
    33  GAS_STATUS = $(shell $(srctree)/arch/ia64/scripts/check-gas "$(CC)" "$(OBJDUMP)")
    34  KBUILD_CPPFLAGS += $(shell $(srctree)/arch/ia64/scripts/toolchain-flags "$(CC)" "$(OBJDUMP)" "$(REA…
    45  head-y := arch/ia64/kernel/head.o
    47  libs-y += arch/ia64/lib/
    48  core-y += arch/ia64/kernel/ arch/ia64/mm/
    49  core-$(CONFIG_IA64_DIG) += arch/ia64/dig/
    50  core-$(CONFIG_IA64_DIG_VTD) += arch/ia64/dig/
    51  core-$(CONFIG_IA64_GENERIC) += arch/ia64/dig/
    52  core-$(CONFIG_IA64_HP_ZX1) += arch/ia64/dig/
    [all …]
|
/Linux-v4.19/tools/perf/trace/beauty/ |
D | arch_errno_names.sh |
    20  local arch="$1"
    23  header="$toolsdir/arch/$arch/include/uapi/asm/errno.h"
    33  local arch=$(arch_string "$1")
    37  static const char *errno_to_name__$arch(int err)
    57  local arch="$1"
    58  local asm_errno=$(asm_errno_file "$arch")
    64  |IFS=, create_errno_lookup_func "$arch"
    71  local arch
    75  for arch in $archlist; do
    76  printf '\tif (!strcmp(arch, "%s"))\n' $(arch_string "$arch")
    [all …]
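
arch_errno_names.sh parses each architecture's asm/errno.h and generates an errno_to_name__<arch>() lookup for perf trace, plus a dispatcher keyed on the arch string. The hand-written sketch below only illustrates the kind of lookup function that ends up being generated; the real script produces one function per architecture from the headers, which is not reproduced here.

/* errno_names.c - illustrative errno-number-to-name lookup. */
#include <errno.h>
#include <stdio.h>

static const char *errno_to_name(int err)
{
	switch (err) {
	case EPERM:   return "EPERM";
	case ENOENT:  return "ENOENT";
	case EINTR:   return "EINTR";
	case EIO:     return "EIO";
	case ENOMEM:  return "ENOMEM";
	case EACCES:  return "EACCES";
	default:      return "(unknown)";
	}
}

int main(void)
{
	printf("%d -> %s\n", ENOENT, errno_to_name(ENOENT));
	return 0;
}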
|
/Linux-v4.19/tools/perf/util/ |
D | env.c |
    18  zfree(&env->arch);  in perf_env__exit()
    100 if (env->arch)  in perf_env__read_arch()
    104 env->arch = strdup(uts.machine);  in perf_env__read_arch()
    106 return env->arch ? 0 : -ENOMEM;  in perf_env__read_arch()
    119 return env && !perf_env__read_arch(env) ? env->arch : "unknown";  in perf_env__raw_arch()
    138 static const char *normalize_arch(char *arch)  in normalize_arch() argument
    140 if (!strcmp(arch, "x86_64"))  in normalize_arch()
    142 if (arch[0] == 'i' && arch[2] == '8' && arch[3] == '6')  in normalize_arch()
    144 if (!strcmp(arch, "sun4u") || !strncmp(arch, "sparc", 5))  in normalize_arch()
    146 if (!strcmp(arch, "aarch64") || !strcmp(arch, "arm64"))  in normalize_arch()
    [all …]
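
The env.c hits cover perf's two-step approach: read the raw machine string once via uname(2) in perf_env__read_arch(), then map aliases such as x86_64, i?86, aarch64/arm64 or sun4u onto canonical names in normalize_arch(). The stand-alone sketch below condenses that approach with a simplified alias table; it is not the actual perf source, just the same pattern.

/* arch_name.c - sketch of uname-based arch detection and normalization. */
#include <stdio.h>
#include <string.h>
#include <sys/utsname.h>

static const char *normalize_arch(const char *arch)
{
	if (!strcmp(arch, "x86_64"))
		return "x86";
	if (arch[0] == 'i' && strlen(arch) == 4 && !strcmp(arch + 2, "86"))
		return "x86";                         /* i386 ... i686 */
	if (!strcmp(arch, "aarch64") || !strcmp(arch, "arm64"))
		return "arm64";
	if (!strcmp(arch, "sun4u") || !strncmp(arch, "sparc", 5))
		return "sparc";
	return arch;                                  /* already canonical */
}

int main(void)
{
	struct utsname uts;

	if (uname(&uts))                              /* same source as `uname -m` */
		return 1;
	printf("raw: %s, normalized: %s\n",
	       uts.machine, normalize_arch(uts.machine));
	return 0;
}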
|
/Linux-v4.19/arch/x86/tools/ |
D | Makefile |
    16  reformatter = $(srctree)/arch/x86/tools/objdump_reformat.awk
    17  chkobjdump = $(srctree)/arch/x86/tools/chkobjdump.awk
    32  …st.o := -Wall -I$(objtree)/arch/x86/lib/ -I$(srctree)/arch/x86/include/uapi/ -I$(srctree)/arch/x86…
    34  …STCFLAGS_insn_sanity.o := -Wall -I$(objtree)/arch/x86/lib/ -I$(srctree)/arch/x86/include/ -I$(srct…
    37  …arch/x86/lib/insn.c $(srctree)/arch/x86/lib/inat.c $(srctree)/arch/x86/include/asm/inat_types.h $(…
    39  …arch/x86/lib/insn.c $(srctree)/arch/x86/lib/inat.c $(srctree)/arch/x86/include/asm/inat_types.h $(…
|
/Linux-v4.19/arch/x86/kernel/ |
D | machine_kexec_32.c |
    59  free_pages((unsigned long)image->arch.pgd, PGD_ALLOCATION_ORDER);  in machine_kexec_free_page_tables()
    60  image->arch.pgd = NULL;  in machine_kexec_free_page_tables()
    62  free_page((unsigned long)image->arch.pmd0);  in machine_kexec_free_page_tables()
    63  image->arch.pmd0 = NULL;  in machine_kexec_free_page_tables()
    64  free_page((unsigned long)image->arch.pmd1);  in machine_kexec_free_page_tables()
    65  image->arch.pmd1 = NULL;  in machine_kexec_free_page_tables()
    67  free_page((unsigned long)image->arch.pte0);  in machine_kexec_free_page_tables()
    68  image->arch.pte0 = NULL;  in machine_kexec_free_page_tables()
    69  free_page((unsigned long)image->arch.pte1);  in machine_kexec_free_page_tables()
    70  image->arch.pte1 = NULL;  in machine_kexec_free_page_tables()
    [all …]
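
machine_kexec_free_page_tables() follows the "free, then NULL the pointer" teardown idiom, which makes a second call harmless and prevents later code from dereferencing a stale pointer. A trivial user-space analogue of the idiom is sketched below, with malloc/free standing in for the page allocator; the struct and function names are invented for the sketch.

/* free_and_clear.c - the free-then-NULL teardown pattern. */
#include <stdlib.h>

struct image_arch {
	void *pgd;
	void *pmd0, *pmd1;
	void *pte0, *pte1;
};

/* Clearing each pointer after freeing keeps teardown idempotent. */
static void free_page_tables(struct image_arch *a)
{
	free(a->pgd);  a->pgd  = NULL;
	free(a->pmd0); a->pmd0 = NULL;
	free(a->pmd1); a->pmd1 = NULL;
	free(a->pte0); a->pte0 = NULL;
	free(a->pte1); a->pte1 = NULL;
}

int main(void)
{
	struct image_arch a = { .pgd = malloc(4096), .pte0 = malloc(4096) };

	free_page_tables(&a);
	free_page_tables(&a);   /* second call is a harmless no-op */
	return 0;
}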
|
/Linux-v4.19/arch/riscv/kernel/ |
D | module-sections.c |
    14  struct mod_section *got_sec = &mod->arch.got;  in module_emit_got_entry()
    33  struct mod_section *got_plt_sec = &mod->arch.got_plt;  in module_emit_plt_entry()
    35  struct mod_section *plt_sec = &mod->arch.plt;  in module_emit_plt_entry()
    99  mod->arch.plt.shdr = sechdrs + i;  in module_frob_arch_sections()
    101 mod->arch.got.shdr = sechdrs + i;  in module_frob_arch_sections()
    103 mod->arch.got_plt.shdr = sechdrs + i;  in module_frob_arch_sections()
    106 if (!mod->arch.plt.shdr) {  in module_frob_arch_sections()
    110 if (!mod->arch.got.shdr) {  in module_frob_arch_sections()
    114 if (!mod->arch.got_plt.shdr) {  in module_frob_arch_sections()
    135 mod->arch.plt.shdr->sh_type = SHT_NOBITS;  in module_frob_arch_sections()
    [all …]
|