/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

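/*
 * Event and power hint instructions: sev() wakes cores waiting in
 * wfe(), and wfe()/wfi() stall until an event/interrupt arrives.
 * The encodings exist from ARMv6K onwards, hence the guard below.
 */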
#if __LINUX_ARM_ARCH__ >= 7 || \
	(__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K))
#define sev() __asm__ __volatile__ ("sev" : : : "memory")
#define wfe() __asm__ __volatile__ ("wfe" : : : "memory")
#define wfi() __asm__ __volatile__ ("wfi" : : : "memory")
#endif

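/*
 * Barrier selection: ARMv7 has dedicated ISB/DSB/DMB instructions
 * taking a domain/access option (ish, ishst, osh, ...); ARMv6 and
 * XScale3 express the same barriers as CP15 c7 operations; older
 * cores keep the CP15 write-buffer drain for dsb() but fall back to
 * compiler barriers where the hardware has no equivalent.
 */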
#if __LINUX_ARM_ARCH__ >= 7
#define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
#define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
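/*
 * CSDB (Consumption of Speculative Data Barrier) is emitted as a raw
 * instruction encoding so it assembles even with toolchains that lack
 * the mnemonic; Thumb-2 and ARM kernels need different encodings.
 */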
#ifdef CONFIG_THUMB2_KERNEL
#define CSDB ".inst.w 0xf3af8014"
#else
#define CSDB ".inst 0xe320f014"
#endif
#define csdb() __asm__ __volatile__(CSDB : : : "memory")
#elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
				    : : "r" (0) : "memory")
#elif defined(CONFIG_CPU_FA526)
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#else
#define isb(x) __asm__ __volatile__ ("" : : : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#endif

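/* Without hardware CSDB support, make it a no-op so callers need no guards. */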
#ifndef CSDB
#define CSDB
#endif
#ifndef csdb
#define csdb()
#endif

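/*
 * Some platforms need more than a DSB for a full system barrier,
 * e.g. to sync an outer cache; they hook soc_mb(), which
 * arm_heavy_mb() calls after the dsb in __arm_heavy_mb().
 */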
#ifdef CONFIG_ARM_HEAVY_MB
extern void (*soc_mb)(void);
extern void arm_heavy_mb(void);
#define __arm_heavy_mb(x...) do { dsb(x); arm_heavy_mb(); } while (0)
#else
#define __arm_heavy_mb(x...) dsb(x)
#endif

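/*
 * Mandatory barriers: these order accesses against DMA-capable
 * devices as well as other CPUs, so they must be real barriers
 * whenever bufferable DMA memory or SMP is configured; otherwise a
 * compiler barrier suffices.
 */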
#if defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
#define mb()		__arm_heavy_mb()
#define rmb()		dsb()
#define wmb()		__arm_heavy_mb(st)
#define dma_rmb()	dmb(osh)
#define dma_wmb()	dmb(oshst)
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

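/*
 * SMP barriers need only order the inner shareable domain (the
 * CPUs), so DMB with the ish/ishst option is sufficient;
 * <asm-generic/barrier.h> maps smp_mb() and friends onto these.
 */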
#define __smp_mb()	dmb(ish)
#define __smp_rmb()	__smp_mb()
#define __smp_wmb()	dmb(ishst)

#ifdef CONFIG_CPU_SPECTRE
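/*
 * Spectre-v1 mitigation: branchlessly generate an all-ones mask when
 * idx < sz and zero otherwise. CMP sets the carry flag iff idx >= sz
 * (no borrow); SBC of a register with itself then yields ~0UL when
 * the index is in bounds and 0 when it is not, and the CSDB prevents
 * speculation from consuming the unmasked index.
 */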
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
		"cmp	%1, %2\n"
	"	sbc	%0, %1, %1\n"
	CSDB
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	return mask;
}
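/* Tell <linux/nospec.h> to use this instead of the generic mask. */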
#define array_index_mask_nospec array_index_mask_nospec
#endif

#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */