/Linux-v5.10/arch/sh/mm/ |
D | flush-sh4.c |
    19  v = aligned_start & ~(L1_CACHE_BYTES-1);  in sh4__flush_wback_region()
    20  end = (aligned_start + size + L1_CACHE_BYTES-1)  in sh4__flush_wback_region()
    21          & ~(L1_CACHE_BYTES-1);  in sh4__flush_wback_region()
    22  cnt = (end - v) / L1_CACHE_BYTES;  in sh4__flush_wback_region()
    25  __ocbwb(v); v += L1_CACHE_BYTES;  in sh4__flush_wback_region()
    26  __ocbwb(v); v += L1_CACHE_BYTES;  in sh4__flush_wback_region()
    27  __ocbwb(v); v += L1_CACHE_BYTES;  in sh4__flush_wback_region()
    28  __ocbwb(v); v += L1_CACHE_BYTES;  in sh4__flush_wback_region()
    29  __ocbwb(v); v += L1_CACHE_BYTES;  in sh4__flush_wback_region()
    30  __ocbwb(v); v += L1_CACHE_BYTES;  in sh4__flush_wback_region()
    [all …]
|
D | cache-sh2a.c |
    57  begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);  in sh2a__flush_wback_region()
    58  end = ((unsigned long)start + size + L1_CACHE_BYTES-1)  in sh2a__flush_wback_region()
    59          & ~(L1_CACHE_BYTES-1);  in sh2a__flush_wback_region()
    70  for (v = begin; v < end; v += L1_CACHE_BYTES) {  in sh2a__flush_wback_region()
    78  for (v = begin; v < end; v += L1_CACHE_BYTES)  in sh2a__flush_wback_region()
    97  begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);  in sh2a__flush_purge_region()
    98  end = ((unsigned long)start + size + L1_CACHE_BYTES-1)  in sh2a__flush_purge_region()
    99          & ~(L1_CACHE_BYTES-1);  in sh2a__flush_purge_region()
    104 for (v = begin; v < end; v+=L1_CACHE_BYTES) {  in sh2a__flush_purge_region()
    127 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);  in sh2a__flush_invalidate_region()
    [all …]
|
D | cache-sh2.c |
    23  begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);  in sh2__flush_wback_region()
    24  end = ((unsigned long)start + size + L1_CACHE_BYTES-1)  in sh2__flush_wback_region()
    25          & ~(L1_CACHE_BYTES-1);  in sh2__flush_wback_region()
    26  for (v = begin; v < end; v+=L1_CACHE_BYTES) {  in sh2__flush_wback_region()
    44  begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);  in sh2__flush_purge_region()
    45  end = ((unsigned long)start + size + L1_CACHE_BYTES-1)  in sh2__flush_purge_region()
    46          & ~(L1_CACHE_BYTES-1);  in sh2__flush_purge_region()
    48  for (v = begin; v < end; v+=L1_CACHE_BYTES)  in sh2__flush_purge_region()
    75  begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);  in sh2__flush_invalidate_region()
    76  end = ((unsigned long)start + size + L1_CACHE_BYTES-1)  in sh2__flush_invalidate_region()
    [all …]
|
D | cache-sh3.c |
    38  begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);  in sh3__flush_wback_region()
    39  end = ((unsigned long)start + size + L1_CACHE_BYTES-1)  in sh3__flush_wback_region()
    40          & ~(L1_CACHE_BYTES-1);  in sh3__flush_wback_region()
    42  for (v = begin; v < end; v+=L1_CACHE_BYTES) {  in sh3__flush_wback_region()
    76  begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);  in sh3__flush_purge_region()
    77  end = ((unsigned long)start + size + L1_CACHE_BYTES-1)  in sh3__flush_purge_region()
    78          & ~(L1_CACHE_BYTES-1);  in sh3__flush_purge_region()
    80  for (v = begin; v < end; v+=L1_CACHE_BYTES) {  in sh3__flush_purge_region()
|
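Every SH variant above follows the same range-rounding idiom: mask the start address down to a cache-line boundary, round the end up, then walk the region one L1_CACHE_BYTES step at a time. A minimal compilable sketch of that idiom; demo_flush_region() and flush_one_line() are made-up names, and the 32-byte line size is an assumption for the example:

#define L1_CACHE_SHIFT  5                       /* assumed: 32-byte lines */
#define L1_CACHE_BYTES  (1UL << L1_CACHE_SHIFT)

static void flush_one_line(unsigned long addr)
{
        (void)addr;     /* stand-in for a per-line op such as __ocbwb(addr) */
}

static void demo_flush_region(void *start, int size)
{
        unsigned long v, begin, end;

        /* round the start down and the end up to whole cache lines */
        begin = (unsigned long)start & ~(L1_CACHE_BYTES - 1);
        end = ((unsigned long)start + size + L1_CACHE_BYTES - 1)
              & ~(L1_CACHE_BYTES - 1);

        for (v = begin; v < end; v += L1_CACHE_BYTES)
                flush_one_line(v);
}

The csky range functions in the next entry use the same rounding and stride, only with different per-line operations.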
/Linux-v5.10/arch/csky/mm/ |
D | cachev2.c |
    26  unsigned long i = start & ~(L1_CACHE_BYTES - 1);  in icache_inv_range()
    28  for (; i < end; i += L1_CACHE_BYTES)  in icache_inv_range()
    49  unsigned long i = param->start & ~(L1_CACHE_BYTES - 1);  in local_icache_inv_range()
    54  for (; i < param->end; i += L1_CACHE_BYTES)  in local_icache_inv_range()
    81  unsigned long i = start & ~(L1_CACHE_BYTES - 1);  in dcache_wb_range()
    83  for (; i < end; i += L1_CACHE_BYTES)  in dcache_wb_range()
    97  unsigned long i = start & ~(L1_CACHE_BYTES - 1);  in dma_wbinv_range()
    99  for (; i < end; i += L1_CACHE_BYTES)  in dma_wbinv_range()
    106 unsigned long i = start & ~(L1_CACHE_BYTES - 1);  in dma_inv_range()
    108 for (; i < end; i += L1_CACHE_BYTES)  in dma_inv_range()
    [all …]
|
/Linux-v5.10/arch/hexagon/include/asm/ |
D | cache.h |
    13  #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)  macro
    15  #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
    17  #define __cacheline_aligned __aligned(L1_CACHE_BYTES)
    18  #define ____cacheline_aligned __aligned(L1_CACHE_BYTES)
|
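The hexagon header shows the usual chain: L1_CACHE_BYTES is derived from L1_CACHE_SHIFT, doubles as ARCH_DMA_MINALIGN, and feeds the __cacheline_aligned attribute macros. A sketch of how such an attribute is typically applied to a structure; the struct and field names are made up, and the 32-byte line size is an assumption:

#define L1_CACHE_BYTES          32      /* assumed line size for the sketch */
#define ____cacheline_aligned   __attribute__((__aligned__(L1_CACHE_BYTES)))

struct demo_stats {
        unsigned long rx_packets;
        unsigned long tx_packets;
} ____cacheline_aligned;                /* starts on, and is sized to, a cache line */

Because the alignment is attached to the type, sizeof(struct demo_stats) is also rounded up to a multiple of the line size, so adjacent instances never share a line.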
/Linux-v5.10/arch/arm/lib/ |
D | copy_page.S |
    14  #define COPY_COUNT (PAGE_SZ / (2 * L1_CACHE_BYTES) PLD( -1 ))
    27      PLD( pld [r1, #L1_CACHE_BYTES] )
    30  1:  PLD( pld [r1, #2 * L1_CACHE_BYTES])
    31      PLD( pld [r1, #3 * L1_CACHE_BYTES])
    33      .rept (2 * L1_CACHE_BYTES / 16 - 1)
|
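copy_page.S sizes its loop as PAGE_SZ / (2 * L1_CACHE_BYTES) iterations, copying two cache lines per pass while issuing pld prefetches two and three lines ahead of the source. A rough C rendering of that arithmetic; the constants are assumptions, __builtin_prefetch stands in for pld, and demo_copy_page() is not a kernel function:

#include <string.h>

#define PAGE_SZ         4096                            /* assumed */
#define L1_CACHE_BYTES  32                              /* assumed */
#define COPY_COUNT      (PAGE_SZ / (2 * L1_CACHE_BYTES))

static void demo_copy_page(char *to, const char *from)
{
        int i;

        for (i = 0; i < COPY_COUNT; i++) {
                __builtin_prefetch(from + 2 * L1_CACHE_BYTES);  /* ~ pld [r1, #2*L1] */
                __builtin_prefetch(from + 3 * L1_CACHE_BYTES);  /* ~ pld [r1, #3*L1] */
                memcpy(to, from, 2 * L1_CACHE_BYTES);
                to   += 2 * L1_CACHE_BYTES;
                from += 2 * L1_CACHE_BYTES;
        }
}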
/Linux-v5.10/arch/powerpc/include/asm/ |
D | page_32.h |
    16  #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
    54  WARN_ON((unsigned long)addr & (L1_CACHE_BYTES - 1));  in clear_page()
    56  for (i = 0; i < PAGE_SIZE / L1_CACHE_BYTES; i++, addr += L1_CACHE_BYTES)  in clear_page()
|
D | cache.h |
    30  #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)  macro
    32  #define SMP_CACHE_BYTES L1_CACHE_BYTES
    85  return L1_CACHE_BYTES;  in l1_dcache_bytes()
    95  return L1_CACHE_BYTES;  in l1_icache_bytes()
|
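clear_page() in page_32.h warns if the address is not cache-line aligned and then clears the page in PAGE_SIZE / L1_CACHE_BYTES steps of one line each (the real per-line primitive is a dcbz-style instruction). A plain-C sketch of that loop shape, with memset() standing in for the per-line op and the constants assumed:

#include <assert.h>
#include <string.h>

#define PAGE_SIZE       4096UL          /* assumed */
#define L1_CACHE_BYTES  32UL            /* assumed */

static void demo_clear_page(void *addr)
{
        unsigned long i;
        char *p = addr;

        /* the real helper uses WARN_ON() for this check */
        assert(((unsigned long)addr & (L1_CACHE_BYTES - 1)) == 0);

        for (i = 0; i < PAGE_SIZE / L1_CACHE_BYTES; i++, p += L1_CACHE_BYTES)
                memset(p, 0, L1_CACHE_BYTES);
}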
/Linux-v5.10/arch/arc/kernel/ |
D | vmlinux.lds.S |
    63  INIT_TEXT_SECTION(L1_CACHE_BYTES)
    68  INIT_SETUP(L1_CACHE_BYTES)
    79  PERCPU_SECTION(L1_CACHE_BYTES)
    94  EXCEPTION_TABLE(L1_CACHE_BYTES)
    104 RW_DATA(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
|
/Linux-v5.10/arch/csky/kernel/ |
D | vmlinux.lds.S |
    30  PERCPU_SECTION(L1_CACHE_BYTES)
    54  RW_DATA(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
    105 EXCEPTION_TABLE(L1_CACHE_BYTES)
    106 BSS_SECTION(L1_CACHE_BYTES, PAGE_SIZE, L1_CACHE_BYTES)
|
/Linux-v5.10/arch/alpha/include/asm/ |
D | cache.h |
    11  # define L1_CACHE_BYTES 64  macro
    17  # define L1_CACHE_BYTES 32  macro
    21  #define SMP_CACHE_BYTES L1_CACHE_BYTES
|
/Linux-v5.10/arch/xtensa/include/asm/ |
D | cache.h |
    17  #define L1_CACHE_BYTES XCHAL_DCACHE_LINESIZE  macro
    18  #define SMP_CACHE_BYTES L1_CACHE_BYTES
    32  #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
|
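Across the headers in this listing, L1_CACHE_BYTES is defined in three ways: computed from L1_CACHE_SHIFT (arm, powerpc, m68k, ia64, c6x, hexagon, microblaze), hard-coded (alpha, parisc, nds32), or taken from a platform parameter (xtensa's XCHAL_DCACHE_LINESIZE); SMP_CACHE_BYTES and ARCH_DMA_MINALIGN are then aliased to it. A composite skeleton of that pattern, with DEMO_ prefixes to mark it as an illustration rather than any single architecture's header:

#define DEMO_L1_CACHE_SHIFT     6                       /* assumed: 64-byte lines */
#define DEMO_L1_CACHE_BYTES     (1 << DEMO_L1_CACHE_SHIFT)

#define DEMO_SMP_CACHE_BYTES    DEMO_L1_CACHE_BYTES     /* alignment used to avoid false sharing */
#define DEMO_ARCH_DMA_MINALIGN  DEMO_L1_CACHE_BYTES     /* minimum alignment for DMA-safe buffers */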
/Linux-v5.10/include/linux/ |
D | cache.h |
    9   #define L1_CACHE_ALIGN(x) __ALIGN_KERNEL(x, L1_CACHE_BYTES)
    13  #define SMP_CACHE_BYTES L1_CACHE_BYTES
    85  #define cache_line_size() L1_CACHE_BYTES
|
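include/linux/cache.h layers the generic helpers on top: L1_CACHE_ALIGN() rounds a value up to the next cache-line multiple via __ALIGN_KERNEL(), SMP_CACHE_BYTES defaults to L1_CACHE_BYTES, and cache_line_size() falls back to it. The round-up is the same mask arithmetic used by the flush loops earlier in this listing; a small sketch with an assumed 64-byte line and a DEMO_ prefix to mark it as illustrative:

#define DEMO_L1_CACHE_BYTES     64UL    /* assumed */
#define DEMO_L1_CACHE_ALIGN(x)  \
        (((x) + DEMO_L1_CACHE_BYTES - 1) & ~(DEMO_L1_CACHE_BYTES - 1))

/* e.g. DEMO_L1_CACHE_ALIGN(1) == 64, DEMO_L1_CACHE_ALIGN(64) == 64, DEMO_L1_CACHE_ALIGN(65) == 128 */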
/Linux-v5.10/arch/nios2/kernel/ |
D | vmlinux.lds.S |
    42  EXCEPTION_TABLE(L1_CACHE_BYTES)
    48  PERCPU_SECTION(L1_CACHE_BYTES)
    53  RW_DATA(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
|
/Linux-v5.10/arch/parisc/include/asm/ |
D | cache.h |
    16  #define L1_CACHE_BYTES 16  macro
    21  #define SMP_CACHE_BYTES L1_CACHE_BYTES
    23  #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
|
/Linux-v5.10/arch/c6x/include/asm/ |
D | cache.h |
    36  #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)  macro
    45  #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
    46  #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
|
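On c6x both ARCH_DMA_MINALIGN and ARCH_SLAB_MINALIGN are pinned to L1_CACHE_BYTES, so that buffers handed to non-coherent DMA do not share a cache line with unrelated data. A sketch of that intent when embedding a DMA buffer in a driver structure; the names and sizes are invented for the example:

#define L1_CACHE_BYTES          64      /* assumed */
#define ARCH_DMA_MINALIGN       L1_CACHE_BYTES

struct demo_dev {
        int             irq_count;      /* CPU-only bookkeeping */
        /* keep the DMA buffer in its own cache line(s): aligned to the
         * line size and a multiple of it, so a cache invalidate of the
         * buffer cannot clobber neighbouring fields */
        unsigned char   rx_buf[256] __attribute__((__aligned__(ARCH_DMA_MINALIGN)));
};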
/Linux-v5.10/arch/powerpc/lib/ |
D | checksum_32.S |
    123 CACHELINE_BYTES = L1_CACHE_BYTES
    125 CACHELINE_MASK = (L1_CACHE_BYTES-1)
    198 #if L1_CACHE_BYTES >= 32
    200 #if L1_CACHE_BYTES >= 64
    203 #if L1_CACHE_BYTES >= 128
    258 #if L1_CACHE_BYTES >= 32
    260 #if L1_CACHE_BYTES >= 64
    263 #if L1_CACHE_BYTES >= 128
|
D | copy_32.S |
    64  CACHELINE_BYTES = L1_CACHE_BYTES
    66  CACHELINE_MASK = (L1_CACHE_BYTES-1)
    214 #if L1_CACHE_BYTES >= 32
    216 #if L1_CACHE_BYTES >= 64
    219 #if L1_CACHE_BYTES >= 128
    393 #if L1_CACHE_BYTES >= 32
    395 #if L1_CACHE_BYTES >= 64
    398 #if L1_CACHE_BYTES >= 128
    451 #if L1_CACHE_BYTES >= 32
    453 #if L1_CACHE_BYTES >= 64
    [all …]
|
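checksum_32.S and copy_32.S derive CACHELINE_BYTES and CACHELINE_MASK from L1_CACHE_BYTES and gate extra unrolling behind the #if L1_CACHE_BYTES >= 32/64/128 ladders. A C-level sketch of how a mask like CACHELINE_MASK is used: peel off the bytes up to the next cache-line boundary, then work in whole lines. This is an illustration of the idea, not the structure of the assembly routines; memcpy() stands in for the per-line code and the 32-byte line is assumed:

#include <stddef.h>
#include <string.h>

#define L1_CACHE_BYTES  32UL                    /* assumed */
#define CACHELINE_MASK  (L1_CACHE_BYTES - 1)

static void demo_copy(char *dst, const char *src, size_t n)
{
        /* bytes until dst reaches the next cache-line boundary */
        size_t lead = (0UL - (unsigned long)dst) & CACHELINE_MASK;

        if (lead > n)
                lead = n;
        memcpy(dst, src, lead);
        dst += lead; src += lead; n -= lead;

        while (n >= L1_CACHE_BYTES) {           /* whole cache lines */
                memcpy(dst, src, L1_CACHE_BYTES);
                dst += L1_CACHE_BYTES;
                src += L1_CACHE_BYTES;
                n -= L1_CACHE_BYTES;
        }
        memcpy(dst, src, n);                    /* tail */
}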
/Linux-v5.10/arch/nds32/include/asm/ |
D | cache.h |
    7   #define L1_CACHE_BYTES 32  macro
    10  #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
|
/Linux-v5.10/arch/m68k/include/asm/ |
D | cache.h |
    10  #define L1_CACHE_BYTES (1<< L1_CACHE_SHIFT)  macro
    12  #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
|
/Linux-v5.10/arch/microblaze/include/asm/ |
D | cache.h |
    17  #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)  macro
    19  #define SMP_CACHE_BYTES L1_CACHE_BYTES
|
/Linux-v5.10/arch/arm/include/asm/ |
D | cache.h |
    9   #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)  macro
    18  #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
|
/Linux-v5.10/arch/ia64/include/asm/ |
D | cache.h |
    13  #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)  macro
    17  # define SMP_CACHE_BYTES L1_CACHE_BYTES
|
/Linux-v5.10/arch/arc/mm/ |
D | cache.c |
    275  num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);  in __cache_line_loop_v2()
    282  paddr += L1_CACHE_BYTES;  in __cache_line_loop_v2()
    320  num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);  in __cache_line_loop_v3()
    342  paddr += L1_CACHE_BYTES;  in __cache_line_loop_v3()
    346  vaddr += L1_CACHE_BYTES;  in __cache_line_loop_v3()
    390  num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);  in __cache_line_loop_v4()
    410  paddr += L1_CACHE_BYTES;  in __cache_line_loop_v4()
    443  sz += L1_CACHE_BYTES - 1;  in __cache_line_loop_v4()
    1226 if (ic->line_len != L1_CACHE_BYTES)  in arc_cache_init_master()
    1228 ic->line_len, L1_CACHE_BYTES);  in arc_cache_init_master()
    [all …]
|
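The ARC helpers count lines with DIV_ROUND_UP(sz, L1_CACHE_BYTES) (after padding sz to cover an unaligned start) and, at init, compare the line length reported by the hardware against the compile-time L1_CACHE_BYTES, complaining on a mismatch. A small sketch of both ideas; the DIV_ROUND_UP definition matches the kernel's, while the demo_* names, the 64-byte line, and the fprintf() reporting are purely illustrative:

#include <stdio.h>

#define L1_CACHE_BYTES          64U             /* assumed */
#define DIV_ROUND_UP(n, d)      (((n) + (d) - 1) / (d))

static unsigned int demo_num_lines(unsigned int sz)
{
        return DIV_ROUND_UP(sz, L1_CACHE_BYTES);        /* e.g. 100 bytes -> 2 lines */
}

static void demo_check_line_len(unsigned int hw_line_len)
{
        if (hw_line_len != L1_CACHE_BYTES)
                fprintf(stderr, "cache line %u doesn't match kernel's %u\n",
                        hw_line_len, L1_CACHE_BYTES);
}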