Searched refs:SID_SHIFT (Results 1 – 15 of 15) sorted by relevance
/Linux-v4.19/arch/powerpc/include/asm/book3s/64/
  mmu-hash.h
    255  return SID_SHIFT;  in segment_shift()
    433  mask = (1ul << (SID_SHIFT - VPN_SHIFT)) - 1;  in hpt_hash()
    434  hash = (vpn >> (SID_SHIFT - VPN_SHIFT)) ^  in hpt_hash()
    555  #define ESID_BITS (VA_BITS - (SID_SHIFT + CONTEXT_BITS))
    581  #define MAX_USER_CONTEXT_65BIT_VA ((ASM_CONST(1) << (65 - (SID_SHIFT + ESID_BITS))) - 2)
    609  #define VSID_BITS_256M (VA_BITS - SID_SHIFT)
    610  #define VSID_BITS_65_256M (65 - SID_SHIFT)
    623  #define USER_VSID_RANGE (1UL << (ESID_BITS + SID_SHIFT))
    724  vsid_bits = va_bits - SID_SHIFT;  in get_vsid()
    726  ((ea >> SID_SHIFT) & ESID_BITS_MASK);  in get_vsid()
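
For context (not part of the search output): a minimal user-space sketch of the bit arithmetic the mmu-hash.h hits rely on. SID_SHIFT = 28 makes each segment 256 MB, and get_vsid() (lines 724/726) extracts the ESID as (ea >> SID_SHIFT) & ESID_BITS_MASK. The ESID_BITS value of 21 below assumes the v4.19 constants VA_BITS = 68 and CONTEXT_BITS = 19; the sample address is made up.

    #include <stdio.h>
    #include <stdint.h>

    #define SID_SHIFT       28                        /* 1 << 28 = 256 MB segments */
    #define ESID_BITS       21                        /* assumed: VA_BITS - (SID_SHIFT + CONTEXT_BITS) */
    #define ESID_BITS_MASK  ((1UL << ESID_BITS) - 1)

    int main(void)
    {
            uint64_t ea = 0x123456789ULL;                          /* hypothetical effective address */
            uint64_t esid = (ea >> SID_SHIFT) & ESID_BITS_MASK;    /* mirrors get_vsid() line 726 */
            uint64_t offset = ea & ((1UL << SID_SHIFT) - 1);       /* offset inside the 256 MB segment */

            printf("esid=0x%llx offset=0x%llx segment=%lu MB\n",
                   (unsigned long long)esid, (unsigned long long)offset,
                   (1UL << SID_SHIFT) >> 20);
            return 0;
    }
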
/Linux-v4.19/arch/powerpc/kvm/
  book3s_32_mmu.c
     47  #ifndef SID_SHIFT
     48  #define SID_SHIFT 28  (macro)
    100  kvmppc_mmu_book3s_32_esid_to_vsid(vcpu, eaddr >> SID_SHIFT, &vsid);  in kvmppc_mmu_book3s_32_ea_to_vp()
    174  eaddr >> SID_SHIFT, &vsid);  in kvmppc_mmu_book3s_32_xlate_bat()
    350  kvmppc_mmu_map_segment(vcpu, srnum << SID_SHIFT);  in kvmppc_mmu_book3s_32_mtsrin()
    366  ulong ea = esid << SID_SHIFT;  in kvmppc_mmu_book3s_32_esid_to_vsid()
  book3s_64_mmu.c
     90  return slbe->tb ? SID_SHIFT_1T : SID_SHIFT;  in kvmppc_slb_sid_shift()
    435  kvmppc_mmu_map_segment(vcpu, esid << SID_SHIFT);  in kvmppc_mmu_book3s_64_slbmte()
    581  (mp_ea >> SID_SHIFT) == esid;  in segment_contains_magic_page()
    588  ulong ea = esid << SID_SHIFT;  in kvmppc_mmu_book3s_64_esid_to_vsid()
    601  gvsid <<= SID_SHIFT_1T - SID_SHIFT;  in kvmppc_mmu_book3s_64_esid_to_vsid()
    602  gvsid |= esid & ((1ul << (SID_SHIFT_1T - SID_SHIFT)) - 1);  in kvmppc_mmu_book3s_64_esid_to_vsid()
    650  unlikely(esid == (mp_ea >> SID_SHIFT)) &&  in kvmppc_mmu_book3s_64_esid_to_vsid()
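
Aside (not from the search output): the book3s_64_mmu.c hits at lines 601-602 widen a 1 TB-segment guest VSID so each 256 MB slice gets its own value, by folding in the low SID_SHIFT_1T - SID_SHIFT = 12 bits of the ESID. A stand-alone sketch of just that step, assuming SID_SHIFT_1T = 40 (the kernel's 1 TB segment shift); the function name is illustrative only:

    #define SID_SHIFT     28      /* 256 MB segments */
    #define SID_SHIFT_1T  40      /* 1 TB segments */

    /* Mirror of lines 601-602: shift the 1T vsid up and append the low 12
     * bits of the 256 MB esid that select the slice within the 1 TB segment. */
    static unsigned long gvsid_256m_from_1t(unsigned long gvsid_1t, unsigned long esid)
    {
            unsigned long gvsid = gvsid_1t;

            gvsid <<= SID_SHIFT_1T - SID_SHIFT;
            gvsid |= esid & ((1ul << (SID_SHIFT_1T - SID_SHIFT)) - 1);
            return gvsid;
    }
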
  book3s_32_mmu_host.c
    170  vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid);  in kvmppc_mmu_map_page()
    179  vpn = (vsid << (SID_SHIFT - VPN_SHIFT)) |  in kvmppc_mmu_map_page()
    316  u32 esid = eaddr >> SID_SHIFT;  in kvmppc_mmu_map_segment()
  book3s_64_mmu_host.c
    118  vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid);  in kvmppc_mmu_map_page()
    229  vcpu->arch.mmu.esid_to_vsid(vcpu, pte->eaddr >> SID_SHIFT, &vsid);  in kvmppc_mmu_unmap_page()
    325  u64 esid = eaddr >> SID_SHIFT;  in kvmppc_mmu_map_segment()
  book3s_pr.c
    702  pte.vpage |= ((u64)VSID_REAL << (SID_SHIFT - 12));  in kvmppc_handle_pagefault()
    711  vcpu->arch.mmu.esid_to_vsid(vcpu, eaddr >> SID_SHIFT, &vsid);  in kvmppc_handle_pagefault()
    714  pte.vpage |= ((u64)VSID_REAL_DR << (SID_SHIFT - 12));  in kvmppc_handle_pagefault()
    716  pte.vpage |= ((u64)VSID_REAL_IR << (SID_SHIFT - 12));  in kvmppc_handle_pagefault()
   1154  sr = svcpu->sr[kvmppc_get_pc(vcpu) >> SID_SHIFT];  in kvmppc_handle_exit_pr()
   1200  sr = svcpu->sr[dar >> SID_SHIFT];  in kvmppc_handle_exit_pr()
/Linux-v4.19/arch/powerpc/include/asm/
  page_64.h
     31  #define SID_SHIFT 28  (macro)
     34  #define GET_ESID(x) (((x) >> SID_SHIFT) & SID_MASK)
  kvm_book3s_32.h
     42  #define SID_SHIFT 28  (macro)
  kvm_book3s_64.h
    234  va_low ^= v >> (SID_SHIFT - 16);  in compute_tlbie_rb()
/Linux-v4.19/arch/powerpc/mm/
  slb_low.S
     91  srdi r10,r3,SID_SHIFT  /* get esid */
    317  srdi r10,r10,(SID_SHIFT_1T - SID_SHIFT)  /* get 1T ESID */
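
Note (not from the search output): the slb_low.S hits are assembly; srdi is a logical right shift of a doubleword by an immediate, so both lines are plain right shifts. A rough C equivalent, assuming SID_SHIFT_1T = 40 as in the headers above; the helper names are illustrative only:

    #define SID_SHIFT     28
    #define SID_SHIFT_1T  40

    static inline unsigned long esid_256m(unsigned long ea)
    {
            return ea >> SID_SHIFT;                        /* line 91: srdi r10,r3,SID_SHIFT */
    }

    static inline unsigned long esid_1t(unsigned long esid_256m_val)
    {
            /* line 317: shift the already-computed 256 MB ESID down to a 1 TB ESID */
            return esid_256m_val >> (SID_SHIFT_1T - SID_SHIFT);
    }
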
  slb.c
    248  << SID_SHIFT;  /* EA */  in switch_slb()
  hash_native_64.c
    709  *vpn = vsid << (SID_SHIFT - VPN_SHIFT) | seg_off >> VPN_SHIFT;  in hpte_decode()
/Linux-v4.19/drivers/misc/cxl/
  fault.c
     47  hash = (slb->esid >> SID_SHIFT) & mask;  in find_free_sste()
/Linux-v4.19/arch/powerpc/kernel/
  setup_64.c
    626  return 1UL << SID_SHIFT;  in ppc64_bolted_size()
/Linux-v4.19/arch/powerpc/platforms/pseries/
  lpar.c
    983  vsid_modulus = ((1UL << (va_bits - SID_SHIFT)) - 1);  in vsid_unscramble()