Lines Matching +full:0 +full:xff000

53 * 0xFFF0000000000000  12-bit lpid field
54 * 0x000FFFFFFFFFF000  40-bit guest 4k page frame number
55 * 0x0000000000000001  1-bit single entry flag
57 #define RMAP_NESTED_LPID_MASK 0xFFF0000000000000UL
59 #define RMAP_NESTED_GPA_MASK 0x000FFFFFFFFFF000UL
60 #define RMAP_NESTED_IS_SINGLE_ENTRY 0x0000000000000001UL
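
The three masks above describe a packed 64-bit nested rmap entry: a 12-bit lpid in the top bits, a 40-bit guest 4k page frame number in the middle, and a single-entry flag in bit 0. Below is a minimal sketch of packing and unpacking such an entry; the shift amounts (52 and 12) are inferred from the mask layout and the helper names are illustrative, not the kernel's.

#include <stdint.h>

#define RMAP_NESTED_LPID_MASK		0xFFF0000000000000UL
#define RMAP_NESTED_GPA_MASK		0x000FFFFFFFFFF000UL
#define RMAP_NESTED_IS_SINGLE_ENTRY	0x0000000000000001UL

/* Pack an entry: lpid in bits 63:52, 4k-aligned guest address in bits 51:12,
 * single-entry flag in bit 0.  Shifts are inferred from the masks above. */
static inline uint64_t rmap_entry_pack(uint64_t lpid, uint64_t gpa, int single)
{
	uint64_t e = ((lpid << 52) & RMAP_NESTED_LPID_MASK) |
		     (gpa & RMAP_NESTED_GPA_MASK);

	if (single)
		e |= RMAP_NESTED_IS_SINGLE_ENTRY;
	return e;
}

static inline uint64_t rmap_entry_lpid(uint64_t e)
{
	return (e & RMAP_NESTED_LPID_MASK) >> 52;
}

static inline uint64_t rmap_entry_gpa(uint64_t e)
{
	return e & RMAP_NESTED_GPA_MASK;	/* already 4k-aligned */
}
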
160 * We use a lock bit in HPTE dword 0 to synchronize updates and
164 #define HPTE_V_HVLOCK 0x40UL
165 #define HPTE_V_ABSENT 0x20UL
189 asm volatile(" ldarx %0,0,%2\n" in try_lock_hpte()
190 " and. %1,%0,%3\n" in try_lock_hpte()
192 " or %0,%0,%4\n" in try_lock_hpte()
193 " stdcx. %0,0,%2\n" in try_lock_hpte()
200 return old == 0; in try_lock_hpte()
207 hpte[0] = cpu_to_be64(hpte_v); in unlock_hpte()
214 hpte[0] = cpu_to_be64(hpte_v); in __unlock_hpte()
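
The fragments above come from the HPTE locking helpers: the ldarx/stdcx. loop atomically checks that none of the given bits (typically HPTE_V_HVLOCK) are already set in HPTE dword 0 and, if so, ORs in the lock bit; the unlock paths store dword 0 back with the lock bit cleared. Here is a rough C-level sketch of the same lock-bit protocol using generic GCC atomics, for illustration only; the real code relies on the ldarx/stdcx. reservation and barriers, and the helper names below are made up.

/* 'busy_bits' are bits that must be clear before taking the lock
 * (e.g. HPTE_V_HVLOCK); on success the lock bit is merged in atomically. */
static inline int hpte_try_lock_sketch(unsigned long *dword0,
				       unsigned long busy_bits,
				       unsigned long lock_bit)
{
	unsigned long old = __atomic_load_n(dword0, __ATOMIC_RELAXED);

	if (old & busy_bits)
		return 0;	/* entry is locked or otherwise busy */
	return __atomic_compare_exchange_n(dword0, &old, old | lock_bit,
					   0, __ATOMIC_ACQUIRE,
					   __ATOMIC_RELAXED);
}

static inline void hpte_unlock_sketch(unsigned long *dword0,
				      unsigned long val,
				      unsigned long lock_bit)
{
	/* publish the updated dword 0 with the lock bit cleared */
	__atomic_store_n(dword0, val & ~lock_bit, __ATOMIC_RELEASE);
}
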
227 lphi = (l >> 16) & 0xf; in kvmppc_hpte_page_shifts()
228 switch ((l >> 12) & 0xf) { in kvmppc_hpte_page_shifts()
229 case 0: in kvmppc_hpte_page_shifts()
230 return !lphi ? 24 : 0; /* 16MB */ in kvmppc_hpte_page_shifts()
236 return !lphi ? 34 : 0; /* 16GB */ in kvmppc_hpte_page_shifts()
248 return 0; in kvmppc_hpte_page_shifts()
253 return kvmppc_hpte_page_shifts(h, l) & 0xff; in kvmppc_hpte_base_page_shift()
260 if (tmp >= 0x100) in kvmppc_hpte_actual_page_shift()
271 return 0; in kvmppc_actual_pgsz()
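
kvmppc_hpte_page_shifts() packs two values into one int: the base page shift in the low byte and, when the actual page size differs, the actual page shift in the next byte, which is what the '& 0xff' and '>= 0x100' tests above are unpacking. A small sketch of that convention, assuming the actual shift lives in the second byte as the '>= 0x100' test suggests (helper names are made up):

/* Unpack a combined shift value: low byte = base page shift,
 * next byte (if present) = actual page shift. */
static inline int base_shift_of(int combined)
{
	return combined & 0xff;
}

static inline int actual_shift_of(int combined)
{
	return combined >= 0x100 ? combined >> 8 : combined;
}

/* Example: a 16MB actual page on a 64kB base page size could be encoded
 * as (24 << 8) + 16 == 0x1810:
 *   base_shift_of(0x1810)   == 16  (64kB)
 *   actual_shift_of(0x1810) == 24  (16MB)
 * and kvmppc_actual_pgsz() returns 0 when no valid shift can be decoded. */
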
280 return 0; in kvmppc_pgsize_lp_encoding()
284 return 0x38; in kvmppc_pgsize_lp_encoding()
296 return 0; in kvmppc_pgsize_lp_encoding()
305 unsigned long rb = 0, va_low, sllp; in compute_tlbie_rb()
308 if (a_pgshift >= 0x100) { in compute_tlbie_rb()
309 b_pgshift &= 0xff; in compute_tlbie_rb()
323 rb = (v & ~0x7fUL) << 16; /* AVA field */ in compute_tlbie_rb()
342 va_low &= 0x7ff; in compute_tlbie_rb()
349 rb |= (va_low & 0x7ff) << 12; /* remaining 11 bits of AVA */ in compute_tlbie_rb()
356 rb |= (va_low << b_pgshift) & 0x7ff000; in compute_tlbie_rb()
367 rb |= ((va_low << aval_shift) & 0xfe); in compute_tlbie_rb()
370 rb |= r & 0xff000 & ((1ul << a_pgshift) - 1); /* LP field */ in compute_tlbie_rb()
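
The last compute_tlbie_rb() line above merges the LP bits from the second HPTE doubleword into RB, keeping only the bits that sit below the actual page size. A tiny worked sketch of that masking, with illustrative values only:

#include <stdio.h>

/* Same expression as the LP-field line above, pulled out for illustration. */
static unsigned long lp_field(unsigned long r, int a_pgshift)
{
	return r & 0xff000 & ((1ul << a_pgshift) - 1);
}

int main(void)
{
	/* 64kB actual page (shift 16): only bits 15:12 of r survive */
	printf("%lx\n", lp_field(0xffffful, 16));	/* prints f000 */
	/* 16MB actual page (shift 24): bits 19:12 of r survive */
	printf("%lx\n", lp_field(0xffffful, 24));	/* prints ff000 */
	return 0;
}
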
419 * return the PTE, otherwise return 0.
423 pte_t old_pte, new_pte = __pte(0); in kvmppc_read_update_linux_pte()
439 return __pte(0); in kvmppc_read_update_linux_pte()
503 unsigned long senc = 0; in slb_pgsize_encoding()
505 if (psize > 0x1000) { in slb_pgsize_encoding()
507 if (psize == 0x10000) in slb_pgsize_encoding()
515 return (hpte_v & ~0xffffffUL) == in is_vrma_hpte()
539 return rcu_dereference_raw_check(kvm->memslots[0]); in kvm_memslots_raw()
565 memset((char *)map + i / 8, 0xff, npages / 8); in set_dirty_bits()
575 memset((char *)map + i / 8, 0xff, npages / 8); in set_dirty_bits_atomic()
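
Both dirty-bitmap helpers above use the same shortcut: when the run of pages covers whole bytes of the bitmap, a memset of 0xff marks eight pages per byte instead of looping over individual bits. A standalone sketch of that idea follows; the explicit alignment checks and the per-bit fallback are additions for this sketch (the matched lines only show the memset path, and the kernel uses its own set_bit primitives).

#include <string.h>

/* Hypothetical per-bit fallback, little-endian bit numbering within bytes. */
static void set_one_bit(unsigned long i, unsigned long *map)
{
	((unsigned char *)map)[i / 8] |= 1u << (i % 8);
}

/* Mark npages consecutive pages dirty, starting at page index i. */
static void mark_dirty_run(unsigned long *map, unsigned long i,
			   unsigned long npages)
{
	if (npages >= 8 && (i % 8) == 0 && (npages % 8) == 0)
		memset((char *)map + i / 8, 0xff, npages / 8);
	else
		for (; npages; ++i, --npages)
			set_one_bit(i, map);
}
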