
Searched refs: L1_CACHE_BYTES (Results 1 – 25 of 191), sorted by relevance


/Linux-v4.19/arch/sh/mm/
flush-sh4.c
19 v = aligned_start & ~(L1_CACHE_BYTES-1); in sh4__flush_wback_region()
20 end = (aligned_start + size + L1_CACHE_BYTES-1) in sh4__flush_wback_region()
21 & ~(L1_CACHE_BYTES-1); in sh4__flush_wback_region()
22 cnt = (end - v) / L1_CACHE_BYTES; in sh4__flush_wback_region()
25 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
26 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
27 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
28 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
29 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
30 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
[all …]
cache-sh2a.c
58 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2a__flush_wback_region()
59 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2a__flush_wback_region()
60 & ~(L1_CACHE_BYTES-1); in sh2a__flush_wback_region()
71 for (v = begin; v < end; v += L1_CACHE_BYTES) { in sh2a__flush_wback_region()
79 for (v = begin; v < end; v += L1_CACHE_BYTES) in sh2a__flush_wback_region()
98 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2a__flush_purge_region()
99 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2a__flush_purge_region()
100 & ~(L1_CACHE_BYTES-1); in sh2a__flush_purge_region()
105 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh2a__flush_purge_region()
128 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2a__flush_invalidate_region()
[all …]
cache-sh2.c
24 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_wback_region()
25 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2__flush_wback_region()
26 & ~(L1_CACHE_BYTES-1); in sh2__flush_wback_region()
27 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh2__flush_wback_region()
45 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_purge_region()
46 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2__flush_purge_region()
47 & ~(L1_CACHE_BYTES-1); in sh2__flush_purge_region()
49 for (v = begin; v < end; v+=L1_CACHE_BYTES) in sh2__flush_purge_region()
76 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_invalidate_region()
77 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2__flush_invalidate_region()
[all …]
cache-sh3.c
41 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh3__flush_wback_region()
42 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh3__flush_wback_region()
43 & ~(L1_CACHE_BYTES-1); in sh3__flush_wback_region()
45 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh3__flush_wback_region()
79 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh3__flush_purge_region()
80 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh3__flush_purge_region()
81 & ~(L1_CACHE_BYTES-1); in sh3__flush_purge_region()
83 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh3__flush_purge_region()
cache-sh5.c
83 addr += L1_CACHE_BYTES; in sh64_icache_inv_kernel_range()
228 addr += L1_CACHE_BYTES; in sh64_icache_inv_current_user_range()
234 #define DUMMY_ALLOCO_AREA_SIZE ((L1_CACHE_BYTES << 10) + (1024 * 4))
345 magic_eaddr += L1_CACHE_BYTES; in sh64_dcache_purge_coloured_phy_page()
374 eaddr += L1_CACHE_BYTES; in sh64_dcache_purge_phy_page()
599 unsigned long end = (unsigned long)vaddr + L1_CACHE_BYTES; in sh5_flush_cache_sigtramp()
601 __flush_wback_region(vaddr, L1_CACHE_BYTES); in sh5_flush_cache_sigtramp()
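All of the SH flush/purge/invalidate routines above share the same shape: round the start address down to a cache-line boundary, round the end up, then issue one cache operation per L1_CACHE_BYTES step. A minimal user-space sketch of that arithmetic follows; the helper names and the 32-byte line size are assumptions for illustration, not kernel code.

#include <stdio.h>

#define L1_CACHE_BYTES 32   /* assumed line size for illustration */

/* Hypothetical stand-in for a per-line cache op such as __ocbwb(). */
static void touch_line(unsigned long v)
{
	printf("cache op at 0x%lx\n", v);
}

/* Same rounding as sh4__flush_wback_region(): align start down, end up. */
static void flush_region(unsigned long start, unsigned long size)
{
	unsigned long v   = start & ~(unsigned long)(L1_CACHE_BYTES - 1);
	unsigned long end = (start + size + L1_CACHE_BYTES - 1)
			    & ~(unsigned long)(L1_CACHE_BYTES - 1);

	while (v < end) {
		touch_line(v);
		v += L1_CACHE_BYTES;
	}
}

int main(void)
{
	flush_region(0x1005, 100);  /* touches lines 0x1000, 0x1020, 0x1040, 0x1060 */
	return 0;
}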
/Linux-v4.19/arch/powerpc/include/asm/
cacheflush.h
67 void *addr = (void *)(start & ~(L1_CACHE_BYTES - 1)); in flush_dcache_range()
68 unsigned long size = stop - (unsigned long)addr + (L1_CACHE_BYTES - 1); in flush_dcache_range()
71 for (i = 0; i < size >> L1_CACHE_SHIFT; i++, addr += L1_CACHE_BYTES) in flush_dcache_range()
83 void *addr = (void *)(start & ~(L1_CACHE_BYTES - 1)); in clean_dcache_range()
84 unsigned long size = stop - (unsigned long)addr + (L1_CACHE_BYTES - 1); in clean_dcache_range()
87 for (i = 0; i < size >> L1_CACHE_SHIFT; i++, addr += L1_CACHE_BYTES) in clean_dcache_range()
100 void *addr = (void *)(start & ~(L1_CACHE_BYTES - 1)); in invalidate_dcache_range()
101 unsigned long size = stop - (unsigned long)addr + (L1_CACHE_BYTES - 1); in invalidate_dcache_range()
104 for (i = 0; i < size >> L1_CACHE_SHIFT; i++, addr += L1_CACHE_BYTES) in invalidate_dcache_range()
page_32.h
16 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
51 for (i = 0; i < PAGE_SIZE / L1_CACHE_BYTES; i++, addr += L1_CACHE_BYTES) in clear_page()
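The powerpc helpers express the same range walk with a line count rather than an end pointer: assuming L1_CACHE_SHIFT = 7 (so L1_CACHE_BYTES = 128, as on many 64-bit parts), flushing start = 0x1005 .. stop = 0x1105 aligns addr down to 0x1000, pads size to 0x105 + 0x7f = 0x184, and 0x184 >> 7 gives 3 loop iterations, touching the lines at 0x1000, 0x1080 and 0x1100.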
/Linux-v4.19/arch/hexagon/include/asm/
cache.h
26 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
28 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
30 #define __cacheline_aligned __aligned(L1_CACHE_BYTES)
31 #define ____cacheline_aligned __aligned(L1_CACHE_BYTES)
/Linux-v4.19/arch/arm/lib/
copy_page.S
17 #define COPY_COUNT (PAGE_SZ / (2 * L1_CACHE_BYTES) PLD( -1 ))
30 PLD( pld [r1, #L1_CACHE_BYTES] )
33 1: PLD( pld [r1, #2 * L1_CACHE_BYTES])
34 PLD( pld [r1, #3 * L1_CACHE_BYTES])
36 .rept (2 * L1_CACHE_BYTES / 16 - 1)
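For reference, with PAGE_SZ = 4096 and L1_CACHE_BYTES = 32 (an assumed, common ARM configuration), COPY_COUNT works out to 4096 / (2 * 32) - 1 = 63 when PLD prefetching is enabled, and the .rept count to 2 * 32 / 16 - 1 = 3.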
/Linux-v4.19/arch/arc/kernel/
vmlinux.lds.S
66 INIT_TEXT_SECTION(L1_CACHE_BYTES)
71 INIT_SETUP(L1_CACHE_BYTES)
83 PERCPU_SECTION(L1_CACHE_BYTES)
98 EXCEPTION_TABLE(L1_CACHE_BYTES)
108 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
/Linux-v4.19/arch/alpha/include/asm/
cache.h
11 # define L1_CACHE_BYTES 64 macro
17 # define L1_CACHE_BYTES 32 macro
21 #define SMP_CACHE_BYTES L1_CACHE_BYTES
/Linux-v4.19/arch/xtensa/include/asm/
cache.h
17 #define L1_CACHE_BYTES XCHAL_DCACHE_LINESIZE macro
18 #define SMP_CACHE_BYTES L1_CACHE_BYTES
32 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/Linux-v4.19/arch/parisc/include/asm/
cache.h
15 #define L1_CACHE_BYTES 16 macro
20 #define SMP_CACHE_BYTES L1_CACHE_BYTES
22 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/Linux-v4.19/arch/unicore32/kernel/
vmlinux.lds.S
33 PERCPU_SECTION(L1_CACHE_BYTES)
50 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
53 EXCEPTION_TABLE(L1_CACHE_BYTES)
/Linux-v4.19/include/linux/
cache.h
9 #define L1_CACHE_ALIGN(x) __ALIGN_KERNEL(x, L1_CACHE_BYTES)
13 #define SMP_CACHE_BYTES L1_CACHE_BYTES
79 #define cache_line_size() L1_CACHE_BYTES
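The generic include/linux/cache.h entry above is where these pieces meet: L1_CACHE_ALIGN() rounds a size up to a whole line, SMP_CACHE_BYTES and cache_line_size() fall back to L1_CACHE_BYTES, and the per-arch headers supply the aligned-attribute wrappers. A self-contained sketch of the usual consumer pattern; the macro stand-ins, 64-byte line size and struct name are made up for illustration.

#include <stdio.h>

/* Stand-ins for the kernel definitions, assuming a 64-byte line. */
#define L1_CACHE_BYTES        64
#define L1_CACHE_ALIGN(x)     (((x) + (L1_CACHE_BYTES - 1)) & ~(L1_CACHE_BYTES - 1))
#define ____cacheline_aligned __attribute__((aligned(L1_CACHE_BYTES)))

/* Hypothetical per-CPU counters: padding each entry to a full line keeps
 * two CPUs from false-sharing the same cache line. */
struct cpu_stat {
	unsigned long hits;
	unsigned long misses;
} ____cacheline_aligned;

int main(void)
{
	struct cpu_stat stats[2];

	printf("sizeof(struct cpu_stat) = %zu\n", sizeof(struct cpu_stat));
	printf("L1_CACHE_ALIGN(100)     = %d\n", L1_CACHE_ALIGN(100));
	printf("entry stride            = %td\n", (char *)&stats[1] - (char *)&stats[0]);
	return 0;
}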
/Linux-v4.19/arch/nios2/kernel/
vmlinux.lds.S
55 EXCEPTION_TABLE(L1_CACHE_BYTES)
61 PERCPU_SECTION(L1_CACHE_BYTES)
66 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
/Linux-v4.19/arch/riscv/kernel/
vmlinux.lds.S
44 PERCPU_SECTION(L1_CACHE_BYTES)
63 RO_DATA_SECTION(L1_CACHE_BYTES)
68 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
/Linux-v4.19/arch/c6x/include/asm/
cache.h
39 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
48 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
49 #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
/Linux-v4.19/arch/powerpc/lib/
copy_32.S
67 CACHELINE_BYTES = L1_CACHE_BYTES
69 CACHELINE_MASK = (L1_CACHE_BYTES-1)
214 #if L1_CACHE_BYTES >= 32
216 #if L1_CACHE_BYTES >= 64
219 #if L1_CACHE_BYTES >= 128
391 #if L1_CACHE_BYTES >= 32
393 #if L1_CACHE_BYTES >= 64
396 #if L1_CACHE_BYTES >= 128
449 #if L1_CACHE_BYTES >= 32
451 #if L1_CACHE_BYTES >= 64
[all …]
checksum_32.S
129 CACHELINE_BYTES = L1_CACHE_BYTES
131 CACHELINE_MASK = (L1_CACHE_BYTES-1)
207 #if L1_CACHE_BYTES >= 32
209 #if L1_CACHE_BYTES >= 64
212 #if L1_CACHE_BYTES >= 128
283 #if L1_CACHE_BYTES >= 32
285 #if L1_CACHE_BYTES >= 64
288 #if L1_CACHE_BYTES >= 128
/Linux-v4.19/arch/nds32/include/asm/
cache.h
7 #define L1_CACHE_BYTES 32 macro
10 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/Linux-v4.19/arch/unicore32/include/asm/
cache.h
16 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
25 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/Linux-v4.19/arch/m68k/include/asm/
cache.h
10 #define L1_CACHE_BYTES (1<< L1_CACHE_SHIFT) macro
12 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/Linux-v4.19/arch/microblaze/include/asm/
cache.h
20 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
22 #define SMP_CACHE_BYTES L1_CACHE_BYTES
/Linux-v4.19/arch/arm/include/asm/
cache.h
9 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
18 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
