25 #define XTENSA_MMU_PTE_VPN_MASK 0xFFFFF000U
28 #define XTENSA_MMU_PTE_PPN_MASK 0xFFFFF000U
31 #define XTENSA_MMU_PTE_ATTR_MASK 0x0000000FU
34 #define XTENSA_MMU_PTE_ATTR_CACHED_MASK 0x0000000CU
37 #define XTENSA_MMU_L1_MASK 0x3FF00000U
40 #define XTENSA_MMU_L2_MASK 0x3FFFFFU
42 #define XTENSA_MMU_PTEBASE_MASK 0xFFC00000
48 #define XTENSA_MMU_PTE_RING_MASK 0x00000030U
57 #define XTENSA_MMU_PTE_SW_MASK 0x00000FC0U
66 /** Construct a page table entry (PTE) */
73 /** Get the attributes from a PTE */
77 /** Set the attributes in a PTE */
81 /** Set the SW field in a PTE */
85 /** Get the SW field from a PTE */
89 /** Set the ring in a PTE */
94 /** Get the ring from a PTE */
98 /** Get the ASID from the RASID register corresponding to the ring in a PTE */
101 >> XTENSA_MMU_PTE_RING_SHIFT) * 8)) & 0xFF)
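Putting the masks above together: the physical page number occupies bits 12-31, the software field bits 6-11, the ring bits 4-5, and the attributes bits 0-3. A minimal host-side sketch of assembling a PTE from those fields; the shift values (4 for the ring, 6 for the SW field) are inferred from the masks, and the attribute value in the demo is an arbitrary illustration, not a recommendation.

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

/* Illustrative PTE packing based on the masks above; shifts are inferred
 * from XTENSA_MMU_PTE_RING_MASK and XTENSA_MMU_PTE_SW_MASK.
 */
static uint32_t pte_pack(uint32_t paddr, uint32_t ring, uint32_t sw, uint32_t attr)
{
	return (paddr & 0xFFFFF000U)          /* XTENSA_MMU_PTE_PPN_MASK  */
	       | ((sw << 6) & 0x00000FC0U)    /* XTENSA_MMU_PTE_SW_MASK   */
	       | ((ring << 4) & 0x00000030U)  /* XTENSA_MMU_PTE_RING_MASK */
	       | (attr & 0x0000000FU);        /* XTENSA_MMU_PTE_ATTR_MASK */
}

int main(void)
{
	/* Kernel ring (0), no SW bits, attribute 0x4 chosen arbitrarily for the demo. */
	printf("0x%08" PRIX32 "\n", pte_pack(0x20001000U, 0, 0, 0x4)); /* prints 0x20001004 */
	return 0;
}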
103 /** Calculate the L2 page table position from a virtual address */
107 /** Calculate the L1 page table position from a virtual address */
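A sketch of the index math these two macros perform, assuming the conventional Xtensa layout of 4 KiB pages with a 1024-entry first-level table (one entry per 4 MiB region) and 1024-entry second-level tables; the macro bodies themselves are not among the matched lines.

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static uint32_t l1_pos(uint32_t vaddr) { return vaddr >> 22; }            /* which 4 MiB region   */
static uint32_t l2_pos(uint32_t vaddr) { return (vaddr >> 12) & 0x3FFU; } /* which 4 KiB page in it */

int main(void)
{
	uint32_t vaddr = 0xE0001000U;

	printf("L1 index %" PRIu32 ", L2 index %" PRIu32 "\n",
	       l1_pos(vaddr), l2_pos(vaddr)); /* prints: L1 index 896, L2 index 1 */
	return 0;
}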
120 #define XTENSA_MMU_PAGE_TABLE_ATTR 0
133 #define XTENSA_MMU_KERNEL_RING 0
141 /** Number of data TLB ways [0-9] */
144 /** Number of instruction TLB ways [0-6] */
177 * Find the PTE address of a given vaddr.
179 * For example, assuming PTEVADDR is 0xE0000000,
180 * the page table region spans 0xE0000000 - 0xE03FFFFF:
182 * the PTE for address 0x00 is at 0xE0000000
183 * the PTE for address 0x1000 is at 0xE0000004
185 * the PTE for address 0xE0000000 (the page table region itself) is at 0xE0380000
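The numbers in that example follow from one 4-byte PTE per 4 KiB page: the PTE of vaddr lives at PTEVADDR + (vaddr >> 12) * 4. A small host-side sketch that reproduces them (the actual header macro is not among the matched lines):

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static uint32_t pte_addr(uint32_t ptevaddr, uint32_t vaddr)
{
	return ptevaddr + ((vaddr >> 12) * 4U);
}

int main(void)
{
	printf("0x%08" PRIX32 "\n", pte_addr(0xE0000000U, 0x00000000U)); /* 0xE0000000 */
	printf("0x%08" PRIX32 "\n", pte_addr(0xE0000000U, 0x00001000U)); /* 0xE0000004 */
	printf("0x%08" PRIX32 "\n", pte_addr(0xE0000000U, 0xE0000000U)); /* 0xE0380000 */
	return 0;
}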
195 * Get the ASID for a given ring from the RASID register.
200 (((rasid) >> ((ring) * 8)) & 0xff)
209 __asm__ volatile("wsr %0, rasid\n\t" in xtensa_rasid_set()
210 "isync\n" : : "a"(rasid)); in xtensa_rasid_set()
222 __asm__ volatile("rsr %0, rasid" : "=a"(rasid)); in xtensa_rasid_get()
227 * @brief Set a ring in the RASID register to a particular value.
236 rasid = (rasid & ~(0xff << (ring * 8))) | ((uint32_t)asid << (ring * 8)); in xtensa_rasid_asid_set()
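A host-side illustration of the read-modify-write on line 236: RASID packs four 8-bit ASIDs, one per ring, so ring N occupies bits 8*N .. 8*N+7 and only that byte is replaced. The starting RASID and ASID values below are made up for the demo.

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

/* Same arithmetic as line 236, done on a plain value instead of the register. */
static uint32_t rasid_with_asid(uint32_t rasid, uint8_t asid, uint8_t ring)
{
	return (rasid & ~(0xFFU << (ring * 8))) | ((uint32_t)asid << (ring * 8));
}

int main(void)
{
	/* Replace the ring-2 ASID in an example RASID value. */
	printf("0x%08" PRIX32 "\n", rasid_with_asid(0x04030201U, 0x2A, 2)); /* 0x042A0201 */
	return 0;
}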
242 * @brief Invalidate a particular instruction TLB entry.
248 __asm__ volatile("iitlb %0\n\t" in xtensa_itlb_entry_invalidate()
249 : : "a" (entry)); in xtensa_itlb_entry_invalidate()
253 * @brief Synchronously invalidate a particular instruction TLB entry.
259 __asm__ volatile("iitlb %0\n\t" in xtensa_itlb_entry_invalidate_sync()
261 : : "a" (entry)); in xtensa_itlb_entry_invalidate_sync()
265 * @brief Synchronously invalidate a particular data TLB entry.
271 __asm__ volatile("idtlb %0\n\t" in xtensa_dtlb_entry_invalidate_sync()
273 : : "a" (entry)); in xtensa_dtlb_entry_invalidate_sync()
277 * @brief Invalidate a particular data TLB entry.
283 __asm__ volatile("idtlb %0\n\t" in xtensa_dtlb_entry_invalidate()
284 : : "a" (entry)); in xtensa_dtlb_entry_invalidate()
288 * @brief Synchronously write to a particular data TLB entry.
295 __asm__ volatile("wdtlb %0, %1\n\t" in xtensa_dtlb_entry_write_sync()
297 : : "a" (pte), "a"(entry)); in xtensa_dtlb_entry_write_sync()
301 * @brief Write to a particular data TLB entry.
308 __asm__ volatile("wdtlb %0, %1\n\t" in xtensa_dtlb_entry_write()
309 : : "a" (pte), "a"(entry)); in xtensa_dtlb_entry_write()
313 * @brief Write to a particular instruction TLB entry.
320 __asm__ volatile("witlb %0, %1\n\t" in xtensa_itlb_entry_write()
321 : : "a" (pte), "a"(entry)); in xtensa_itlb_entry_write()
325 * @brief Synchronously write to a particular instruction TLB entry.
332 __asm__ volatile("witlb %0, %1\n\t" in xtensa_itlb_entry_write_sync()
334 : : "a" (pte), "a"(entry)); in xtensa_itlb_entry_write_sync()
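A hedged usage sketch of the write helpers above: the first argument is the PTE, the second encodes the virtual page together with the TLB way to write. The way number (7) and attribute (0x4) are illustrative assumptions, the ring shift is inferred from the masks earlier in the file, and the include path assumes building inside the Zephyr Xtensa arch tree; this is not taken from in-tree code.

#include <stdint.h>
#include <xtensa_mmu_priv.h> /* assumed include path when built in-tree */

/* Sketch only: pin a 4 KiB identity mapping for paddr into an assumed wired way. */
static void pin_kernel_page(uint32_t paddr)
{
	uint32_t pte = (paddr & 0xFFFFF000U) |
		       (XTENSA_MMU_KERNEL_RING << 4) |
		       0x4U; /* assumed attribute value */
	uint32_t entry = (paddr & 0xFFFFF000U) | 7U; /* assumed wired way */

	xtensa_dtlb_entry_write_sync(pte, entry);
	xtensa_itlb_entry_write_sync(pte, entry);
}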
345 * a previously-used page table. It does not need to be called
355 for (way = 0; way < XTENSA_MMU_NUM_TLB_AUTOREFILL_WAYS; way++) { in xtensa_tlb_autorefill_invalidate()
356 for (i = 0; i < entries; i++) { in xtensa_tlb_autorefill_invalidate()
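The loop body is not among the matched lines; the following is a hedged reconstruction of the per-entry work, assuming each autorefill entry is addressed by its index placed in the page-number bits plus the way number in the low bits. Here `entries` stands for the number of autorefill entries per way, which the real code derives from the core's configuration.

/* Sketch of how the nested loops above might invalidate every autorefill entry. */
static inline void tlb_autorefill_invalidate_sketch(uint32_t entries)
{
	uint32_t way, i;

	for (way = 0; way < XTENSA_MMU_NUM_TLB_AUTOREFILL_WAYS; way++) {
		for (i = 0; i < entries; i++) {
			/* Assumed encoding: entry index in bits 12+, way number in the low bits. */
			uint32_t entry = (i << 12) | way;

			xtensa_dtlb_entry_invalidate(entry);
			xtensa_itlb_entry_invalidate(entry);
		}
	}
}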
375 __asm__ volatile("wsr.ptevaddr %0" : : "a"((uint32_t)ptables)); in xtensa_ptevaddr_set()
389 __asm__ volatile("rsr.ptevaddr %0" : "=a" (ptables)); in xtensa_ptevaddr_get()
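Usage sketch: point the hardware refill walker at the self-mapping base used in the earlier example. The argument is assumed to be a pointer, as the (uint32_t) cast at line 375 suggests, and per XTENSA_MMU_PTEBASE_MASK (0xFFC00000) at line 42 only the top ten bits are significant, so the base must be 4 MiB aligned. The 0xE0000000 value is just the example address from above.

static void point_walker_at_self_map(void)
{
	void *pte_base = (void *)0xE0000000U; /* example base; must be 4 MiB aligned */

	xtensa_ptevaddr_set(pte_base);
}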
395 * @brief Get the virtual address associated with a particular data TLB entry.
403 __asm__ volatile("rdtlb0 %0, %1\n\t" : "=a" (vaddr) : "a" (entry)); in xtensa_dtlb_vaddr_read()
408 * @brief Get the physical address associated with a particular data TLB entry.
416 __asm__ volatile("rdtlb1 %0, %1\n\t" : "=a" (paddr) : "a" (entry)); in xtensa_dtlb_paddr_read()
421 * @brief Get the virtual address associated with a particular instruction TLB entry.
429 __asm__ volatile("ritlb0 %0, %1\n\t" : "=a" (vaddr), "+a" (entry)); in xtensa_itlb_vaddr_read()
434 * @brief Get the physical address associated with a particular instruction TLB entry.
442 __asm__ volatile("ritlb1 %0, %1\n\t" : "=a" (paddr), "+a" (entry)); in xtensa_itlb_paddr_read()
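Usage sketch combining the two data-TLB readbacks (the instruction-side pair works the same way): given an entry in the usual vaddr-plus-way encoding, print the translation it currently holds. printk and its header path are a Zephyr assumption.

#include <stdint.h>
#include <zephyr/sys/printk.h>

static void dump_dtlb_entry(uint32_t entry)
{
	uint32_t va = xtensa_dtlb_vaddr_read(entry);
	uint32_t pa = xtensa_dtlb_paddr_read(entry);

	printk("DTLB entry 0x%08x: vaddr 0x%08x -> paddr 0x%08x\n", entry, va, pa);
}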
447 * @brief Probe for an instruction TLB entry matching a virtual address.
457 __asm__ __volatile__("pitlb %0, %1\n\t" : "=a" (ret) : "a" ((uint32_t)vaddr)); in xtensa_itlb_probe()
462 * @brief Probe for a data TLB entry matching a virtual address.
472 __asm__ __volatile__("pdtlb %0, %1\n\t" : "=a" (ret) : "a" ((uint32_t)vaddr)); in xtensa_dtlb_probe()
477 * @brief Invalidate an instruction TLB entry associated with a virtual address.
479 * This invalidates an instruction TLB entry associated with a virtual address
494 * @brief Invalidate a data TLB entry associated with a virtual address.
496 * This invalidates a data TLB entry associated with a virtual address
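A hedged sketch of how the probe pairs with the invalidate helpers to implement the by-virtual-address invalidation described above. The probe result carries a hit flag; the header's constant for it is not among the matched lines, so the XTENSA_MMU_PDTLB_HIT name below is an assumption.

static inline void dtlb_vaddr_invalidate_sketch(void *vaddr)
{
	uint32_t entry = xtensa_dtlb_probe(vaddr);

	/* Only invalidate if the probe actually hit; the flag name is assumed. */
	if ((entry & XTENSA_MMU_PDTLB_HIT) != 0) {
		xtensa_dtlb_entry_invalidate_sync(entry);
	}
}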
511 * @brief Tell the hardware to use a page table for the very first time after boot.
518 * @brief Switch to a new page table.
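Not the declared routine's implementation (that code is not among the matched lines); just an illustration of how the helpers above compose when moving to a new address space: install the new ASID for the user ring via a RASID read-modify-write, then flush the autorefill ways so no stale translations survive. The ring and ASID parameters are illustrative, and whether the real switch also updates PTEVADDR or takes further steps is not shown in this listing.

static void switch_address_space_sketch(uint8_t user_asid, uint8_t user_ring)
{
	uint32_t rasid = xtensa_rasid_get();

	/* Replace only the byte belonging to user_ring, as in xtensa_rasid_asid_set(). */
	rasid = (rasid & ~(0xFFU << (user_ring * 8U))) |
		((uint32_t)user_asid << (user_ring * 8U));
	xtensa_rasid_set(rasid);

	/* Drop autorefill entries that may still reference the old page table. */
	xtensa_tlb_autorefill_invalidate();
}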