Lines Matching +full:l2 +full:- +full:cache
2 * arch/arm/mm/cache-feroceon-l2.c - Feroceon L2 cache controller support
11 * - Unified Layer 2 Cache for Feroceon CPU Cores,
12 * Document ID MV-S104858-00, Rev. A, October 23 2007.
22 #include <asm/hardware/cache-feroceon-l2.h>
27 * Low-level cache maintenance operations.
29 * As well as the regular 'clean/invalidate/flush L2 cache line by
30 * MVA' instructions, the Feroceon L2 cache controller also features
31 * 'clean/invalidate L2 range by MVA' operations.
33 * Cache range operations are initiated by writing the start and
35 * cache line whose first byte address lies in the inclusive range
38 * The cache range operations stall the CPU pipeline until completion.
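For orientation, the per-line operations mentioned above each boil down to a single CP15 write; a minimal sketch is below. The opcode encodings (p15, 1, c15, c9/c10/c11) are illustrative, assumed to follow the Feroceon documentation cited above rather than verified here.

	/* Sketch of the single-line primitives; CP15 encodings are assumptions. */
	static inline void l2_clean_pa(unsigned long addr)
	{
		/* Clean one L2 line by MVA. */
		__asm__("mcr p15, 1, %0, c15, c9, 3" : : "r" (addr));
	}

	static inline void l2_clean_inv_pa(unsigned long addr)
	{
		/* Clean and invalidate one L2 line by MVA. */
		__asm__("mcr p15, 1, %0, c15, c10, 3" : : "r" (addr));
	}

	static inline void l2_inv_pa(unsigned long addr)
	{
		/* Invalidate one L2 line by MVA. */
		__asm__("mcr p15, 1, %0, c15, c11, 3" : : "r" (addr));
	}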
51 * memory mapping afterwards (note: a cache flush may happen in l2_get_va()
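Because the hardware does a TLB lookup, a physical address has to be turned into a mapped virtual address first. A sketch of the l2_get_va()/l2_put_va() pair, assuming the usual kmap_atomic_pfn()/__phys_to_virt() split between highmem and lowmem pages:

	/*
	 * Sketch: obtain a VA purely so the TLB lookup can succeed. The
	 * temporary highmem mapping is never actually touched, so it does
	 * not need flushing when it is torn down again.
	 */
	static inline unsigned long l2_get_va(unsigned long paddr)
	{
	#ifdef CONFIG_HIGHMEM
		return (unsigned long)kmap_atomic_pfn(paddr >> PAGE_SHIFT);
	#else
		return __phys_to_virt(paddr);
	#endif
	}

	static inline void l2_put_va(unsigned long vaddr)
	{
	#ifdef CONFIG_HIGHMEM
		kunmap_atomic((void *)vaddr);
	#endif
	}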
79 * L2 is PIPT and range operations only do a TLB lookup on the start address in l2_clean_pa_range()
85 va_end = va_start + (end - start); in l2_clean_pa_range()
110 * L2 is PIPT and range operations only do a TLB lookup on the start address in l2_inv_pa_range()
116 va_end = va_start + (end - start); in l2_inv_pa_range()
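A range clean then roughly combines these pieces: map the start address, derive va_end by offsetting (both bounds must sit in the same page, since only the start address is looked up in the TLB), and issue the two CP15 writes back to back with interrupts disabled so the pair cannot be split by preemption. The opcodes are again illustrative assumptions.

	static inline void l2_clean_pa_range(unsigned long start, unsigned long end)
	{
		unsigned long va_start, va_end, flags;

		/* 'start' and 'end' must lie in the same page (PIPT L2,
		 * TLB lookup on the start address only). */
		BUG_ON((start ^ end) >> PAGE_SHIFT);

		va_start = l2_get_va(start);
		va_end = va_start + (end - start);

		raw_local_irq_save(flags);
		__asm__("mcr p15, 1, %0, c15, c9, 4\n\t"	/* range start */
			"mcr p15, 1, %1, c15, c9, 5"		/* range end, kicks off the op */
			: : "r" (va_start), "r" (va_end));
		raw_local_irq_restore(flags);

		l2_put_va(va_start);
	}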
134 * noninclusive, while the hardware cache range operations use
146 BUG_ON(start & (CACHE_LINE_SIZE - 1)); in calc_range_end()
147 BUG_ON(end & (CACHE_LINE_SIZE - 1)); in calc_range_end()
150 * Try to process all cache lines between 'start' and 'end'. in calc_range_end()
155 * Limit the number of cache lines processed at once, in calc_range_end()
156 * since cache range operations stall the CPU pipeline in calc_range_end()
163 * Cache range operations can't straddle a page boundary. in calc_range_end()
165 if (range_end > (start | (PAGE_SIZE - 1)) + 1) in calc_range_end()
166 range_end = (start | (PAGE_SIZE - 1)) + 1; in calc_range_end()
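Put differently, calc_range_end() turns the exclusive [start, end) range the kernel passes in into the end of the largest chunk a single hardware operation may cover: line-aligned, capped so the pipeline stall stays bounded, and clipped at the next page boundary. A sketch; MAX_RANGE_SIZE and the 32-byte line size are assumed values here:

	/* Assumed values: Feroceon L2 line size and a stall-bounding cap. */
	#define CACHE_LINE_SIZE		32
	#define MAX_RANGE_SIZE		1024

	static inline unsigned long calc_range_end(unsigned long start,
						   unsigned long end)
	{
		unsigned long range_end;

		/* Both bounds must already be cache-line aligned. */
		BUG_ON(start & (CACHE_LINE_SIZE - 1));
		BUG_ON(end & (CACHE_LINE_SIZE - 1));

		/* Try to process everything in one go... */
		range_end = end;

		/* ...but bound the stall a single range operation causes... */
		if (range_end > start + MAX_RANGE_SIZE)
			range_end = start + MAX_RANGE_SIZE;

		/* ...and never cross into the next page. */
		if (range_end > (start | (PAGE_SIZE - 1)) + 1)
			range_end = (start | (PAGE_SIZE - 1)) + 1;

		return range_end;
	}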
174 * Clean and invalidate partial first cache line. in feroceon_l2_inv_range()
176 if (start & (CACHE_LINE_SIZE - 1)) { in feroceon_l2_inv_range()
177 l2_clean_inv_pa(start & ~(CACHE_LINE_SIZE - 1)); in feroceon_l2_inv_range()
178 start = (start | (CACHE_LINE_SIZE - 1)) + 1; in feroceon_l2_inv_range()
182 * Clean and invalidate partial last cache line. in feroceon_l2_inv_range()
184 if (start < end && end & (CACHE_LINE_SIZE - 1)) { in feroceon_l2_inv_range()
185 l2_clean_inv_pa(end & ~(CACHE_LINE_SIZE - 1)); in feroceon_l2_inv_range()
186 end &= ~(CACHE_LINE_SIZE - 1); in feroceon_l2_inv_range()
190 * Invalidate all full cache lines between 'start' and 'end'. in feroceon_l2_inv_range()
194 l2_inv_pa_range(start, range_end - CACHE_LINE_SIZE); in feroceon_l2_inv_range()
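Taken together, the invalidate path looks roughly as follows: any partial head or tail line is cleaned and invalidated individually (a plain invalidate there could discard unrelated dirty data sharing the line), and the aligned middle is invalidated chunk by chunk. Because the hardware range is inclusive, each chunk ends at range_end - CACHE_LINE_SIZE, the address of its last full line.

	static void feroceon_l2_inv_range(unsigned long start, unsigned long end)
	{
		/* Partial first cache line: clean+invalidate. */
		if (start & (CACHE_LINE_SIZE - 1)) {
			l2_clean_inv_pa(start & ~(CACHE_LINE_SIZE - 1));
			start = (start | (CACHE_LINE_SIZE - 1)) + 1;
		}

		/* Partial last cache line: same reasoning. */
		if (start < end && end & (CACHE_LINE_SIZE - 1)) {
			l2_clean_inv_pa(end & ~(CACHE_LINE_SIZE - 1));
			end &= ~(CACHE_LINE_SIZE - 1);
		}

		/* Invalidate the remaining full lines in page-bounded chunks. */
		while (start < end) {
			unsigned long range_end = calc_range_end(start, end);

			l2_inv_pa_range(start, range_end - CACHE_LINE_SIZE);
			start = range_end;
		}

		dsb();
	}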
204 * If L2 is forced to WT, the L2 will always be clean and we in feroceon_l2_clean_range()
208 start &= ~(CACHE_LINE_SIZE - 1); in feroceon_l2_clean_range()
209 end = (end + CACHE_LINE_SIZE - 1) & ~(CACHE_LINE_SIZE - 1); in feroceon_l2_clean_range()
212 l2_clean_pa_range(start, range_end - CACHE_LINE_SIZE); in feroceon_l2_clean_range()
222 start &= ~(CACHE_LINE_SIZE - 1); in feroceon_l2_flush_range()
223 end = (end + CACHE_LINE_SIZE - 1) & ~(CACHE_LINE_SIZE - 1); in feroceon_l2_flush_range()
227 l2_clean_pa_range(start, range_end - CACHE_LINE_SIZE); in feroceon_l2_flush_range()
228 l2_inv_pa_range(start, range_end - CACHE_LINE_SIZE); in feroceon_l2_flush_range()
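The clean and flush paths are simpler: partial lines are just rounded outward, since cleaning a few extra bytes is harmless. A sketch of the flush variant is below (l2_wt_override is the driver's write-through override flag); the clean variant is the same loop without the invalidate, and it returns immediately under WT override because a write-through L2 is always clean.

	static void feroceon_l2_flush_range(unsigned long start, unsigned long end)
	{
		/* Round outward to whole cache lines. */
		start &= ~(CACHE_LINE_SIZE - 1);
		end = (end + CACHE_LINE_SIZE - 1) & ~(CACHE_LINE_SIZE - 1);

		while (start < end) {
			unsigned long range_end = calc_range_end(start, end);

			/* A write-through L2 is always clean, so skip the clean. */
			if (!l2_wt_override)
				l2_clean_pa_range(start, range_end - CACHE_LINE_SIZE);
			l2_inv_pa_range(start, range_end - CACHE_LINE_SIZE);
			start = range_end;
		}

		dsb();
	}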
237 * Routines to disable and re-enable the D-cache and I-cache at run
238 * time. These are necessary because the L2 cache can only be enabled
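Toggling the L2 therefore happens with the L1 caches temporarily off. A minimal sketch of the D-cache half, assuming the standard get_cr()/set_cr() accessors and the CR_C control-register bit from <asm/cp15.h>; the I-cache side follows the same pattern with CR_I:

	static int __init flush_and_disable_dcache(void)
	{
		u32 cr = get_cr();

		if (cr & CR_C) {
			unsigned long flags;

			/* Flush everything out, then clear the C bit. */
			raw_local_irq_save(flags);
			flush_cache_all();
			set_cr(cr & ~CR_C);
			raw_local_irq_restore(flags);

			return 1;	/* caller re-enables the D-cache afterwards */
		}
		return 0;		/* was already off */
	}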
312 * verify that the Disable L2 Prefetch bit is set. in disable_l2_prefetch()
316 pr_info("Feroceon L2: Disabling L2 prefetch.\n"); in disable_l2_prefetch()
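The prefetch disable is a read-modify-write of the Marvell CPU Extra Features register. In the sketch below, read_extra_features()/write_extra_features() and bit 24 as the 'Disable L2 Prefetch' bit are assumptions, not taken from the lines above:

	static void __init disable_l2_prefetch(void)
	{
		u32 u = read_extra_features();	/* assumed accessor */

		/* Make sure the 'Disable L2 Prefetch' bit (assumed: bit 24) is set. */
		if (!(u & 0x01000000)) {
			pr_info("Feroceon L2: Disabling L2 prefetch.\n");
			write_extra_features(u | 0x01000000);
		}
	}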
329 pr_info("Feroceon L2: Enabling L2\n"); in enable_l2()
341 "Feroceon L2: bootloader left the L2 cache on!\n"); in enable_l2()
356 pr_info("Feroceon L2: Cache support initialised%s.\n", in feroceon_l2_init()
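Initialisation then amounts to disabling prefetch, publishing the three range operations through the generic outer_cache hooks, and switching the L2 on; the %s in the message above reports WT override mode. A sketch consistent with those messages:

	void __init feroceon_l2_init(int __l2_wt_override)
	{
		l2_wt_override = __l2_wt_override;

		disable_l2_prefetch();

		/* Publish the maintenance ops through the generic outer-cache hooks. */
		outer_cache.inv_range = feroceon_l2_inv_range;
		outer_cache.clean_range = feroceon_l2_clean_range;
		outer_cache.flush_range = feroceon_l2_flush_range;

		enable_l2();

		pr_info("Feroceon L2: Cache support initialised%s.\n",
			l2_wt_override ? ", in WT override mode" : "");
	}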
361 { .compatible = "marvell,kirkwood-cache"},
362 { .compatible = "marvell,feroceon-cache"},
377 if (node && of_device_is_compatible(node, "marvell,kirkwood-cache")) { in feroceon_of_init()
380 return -ENOMEM; in feroceon_of_init()
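Finally, the DT probe matches either compatible string; the Kirkwood flavour additionally maps a register block (hence the -ENOMEM above) before handing over to feroceon_l2_init(). A sketch with the Kirkwood-specific register write elided:

	static const struct of_device_id feroceon_ids[] __initconst = {
		{ .compatible = "marvell,kirkwood-cache" },
		{ .compatible = "marvell,feroceon-cache" },
		{}
	};

	int __init feroceon_of_init(void)
	{
		struct device_node *node;
		void __iomem *base;
		bool l2_wt_override = false;

	#if defined(CONFIG_CACHE_FEROCEON_L2_WRITETHROUGH)
		l2_wt_override = true;
	#endif

		node = of_find_matching_node(NULL, feroceon_ids);
		if (node && of_device_is_compatible(node, "marvell,kirkwood-cache")) {
			/* Kirkwood exposes an extra control register block. */
			base = of_iomap(node, 0);
			if (!base)
				return -ENOMEM;

			/* ... Kirkwood-specific WT setup using 'base' would go here ... */
		}

		feroceon_l2_init(l2_wt_override);

		return 0;
	}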