1 /*
2 * Copyright (c) 2016-2021, ARM Limited and Contributors. All rights reserved.
3 * Portions copyright (c) 2021-2022, ProvenRun S.A.S. All rights reserved.
4 *
5 * SPDX-License-Identifier: BSD-3-Clause
6 */
7
8 #ifndef ARCH_HELPERS_H
9 #define ARCH_HELPERS_H
10
11 #include <assert.h>
12 #include <cdefs.h>
13 #include <stdbool.h>
14 #include <stdint.h>
15 #include <string.h>
16
17 #include <arch.h>
18
/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers
 *********************************************************************/

/*
 * Emit "write_<_name>()": writes a 32-bit value to the coprocessor
 * register identified by (coproc, opc1, CRn, CRm, opc2) using an MCR
 * instruction.
 */
#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
static inline void write_## _name(u_register_t v)			\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Emit "read_<_name>()": returns the 32-bit value of the coprocessor
 * register identified by (coproc, opc1, CRn, CRm, opc2) read with an
 * MRC instruction.
 */
#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
static inline u_register_t read_ ## _name(void)				\
{									\
	u_register_t v;							\
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v;							\
}
37
/*
 * The undocumented %Q and %R extended asm operand modifiers are used to
 * implement the 64-bit `mrrc` and `mcrr` instruction accessors below.
 */
42
/*
 * Emit "write64_<_name>()": writes a 64-bit value to the coprocessor
 * register pair identified by (coproc, opc1, CRm) using an MCRR
 * instruction (%Q0/%R0 select the two halves of the 64-bit operand).
 */
#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm)	\
static inline void write64_## _name(uint64_t v)			\
{								\
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}

/*
 * Emit "read64_<_name>()": returns the 64-bit value of the coprocessor
 * register pair identified by (coproc, opc1, CRm) read with an MRRC
 * instruction.
 */
#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm)	\
static inline uint64_t read64_## _name(void)			\
{	uint64_t v;						\
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v;						\
}
55
/*
 * Emit "read_<_name>()": returns the value of special register
 * <_reg_name> read with an MRS instruction.
 */
#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name)		\
static inline u_register_t read_ ## _name(void)			\
{								\
	u_register_t v;						\
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v));	\
	return v;						\
}

/*
 * Emit "write_<_name>()": writes a value to special register
 * <_reg_name> with an MSR instruction.
 */
#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)		\
static inline void write_ ## _name(u_register_t v)		\
{								\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v));	\
}

/*
 * As above, but the value must be a compile-time constant: the "i"
 * asm constraint forces it to be encoded as an immediate operand of
 * the MSR instruction.
 */
#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name)	\
static inline void write_ ## _name(const u_register_t v)	\
{								\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v));	\
}
75
/*
 * Public wrappers around the _DEFINE_* helpers above. The extra level
 * of macro indirection ensures that macro arguments (e.g. register
 * encodings defined in arch.h) are fully expanded before being
 * stringified into the asm templates.
 */

/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) 			\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) 			\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) 			\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)		\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) 		\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) 		\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) 		\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)	\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name) 				\
	_DEFINE_SYSREG_READ_FUNC(_name, _name) 			\
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)
106
/**********************************************************************
 * Macros to create inline functions for tlbi operations
 *********************************************************************/

/*
 * Emit "tlbi<_op>()": performs the TLB maintenance operation encoded by
 * (coproc, opc1, CRn, CRm, opc2). A zero is written because the
 * operation takes no meaningful register argument.
 */
#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void tlbi##_op(void)				\
{								\
	u_register_t v = 0;					\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Emit "bpi<_op>()": performs the branch-predictor maintenance
 * operation encoded by (coproc, opc1, CRn, CRm, opc2); the written
 * value is likewise a dummy zero.
 */
#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void bpi##_op(void)				\
{								\
	u_register_t v = 0;					\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Emit "tlbi<_op>(v)": TLB maintenance operation that takes a register
 * argument (e.g. the virtual address to invalidate).
 */
#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void tlbi##_op(u_register_t v)			\
{								\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...)				\
	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...)			\
	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...)				\
	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)
142
/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/

/*
 * Emit "dc<_op>(v)": data-cache maintenance operation taking a register
 * argument (the virtual address to operate on).
 */
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void dc##_op(u_register_t v)			\
{								\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...)			\
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)
155
/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/
/*
 * Define function for simple system instruction, e.g. wfi() for "wfi".
 * NOTE(review): an asm statement without outputs is implicitly volatile
 * so the instruction is always emitted, but unlike the _TYPE_ variant
 * below no "memory" clobber is declared — confirm no caller relies on
 * these ordering compiler-visible memory accesses.
 */
#define DEFINE_SYSOP_FUNC(_op)				\
static inline void _op(void)				\
{							\
	__asm__ (#_op);					\
}


/*
 * Define function for system instruction with type specifier, e.g.
 * dsbish() for "dsb ish". The "memory" clobber stops the compiler from
 * reordering memory accesses across the barrier.
 */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type)		\
static inline void _op ## _type(void)			\
{							\
	__asm__ (#_op " " #_type : : : "memory");	\
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type)	\
static inline void _op ## _type(u_register_t v)		\
{							\
	__asm__ (#_op " " #_type ", %0" : : "r" (v));	\
}
180
/*
 * Data-cache maintenance by virtual address over [addr, addr + size);
 * implemented outside this header.
 */
void flush_dcache_range(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);
bool is_dcache_enabled(void);

/* Data-cache maintenance by set/way; op_type selects the operation. */
void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);

/* Disable the secure-world MMU (and optionally the I-cache too). */
void disable_mmu_secure(void);
void disable_mmu_icache_secure(void);
191
/* Standby/event and barrier instruction wrappers */
DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)

/* dmb ld is not valid for armv7/thumb machines */
#if ARM_ARCH_MAJOR != 7
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
#endif

DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
DEFINE_SYSOP_FUNC(isb)

/*
 * Issue an SMC with arguments in r0-r7; marked __dead2 as it does not
 * return to the caller.
 */
void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
		 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);

/* Accessors for the current and saved program status registers */
DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)
215
/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
/* Read-only identification and status registers */
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_mmfr3, ID_MMFR3)
DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
DEFINE_COPROCR_READ_FUNC(id_dfr0, ID_DFR0)
DEFINE_COPROCR_READ_FUNC(id_dfr1, ID_DFR1)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)

/* Read/write control, translation and timer registers */
DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)
259
/* Extract individual fields from a CNTP_CTL register value */
#define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
					CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					CNTP_CTL_ISTATUS_MASK)

/* Set individual fields in a CNTP_CTL value held in x (modifies x) */
#define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

/* Clear individual fields in a CNTP_CTL value held in x (modifies x) */
#define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
272
/* GIC CPU interface (ICC_*) register accessors */
DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
/* 64-bit SGI generation registers */
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_asgi1r, ICC_ASGI1R_EL1_64)

/* Debug and PMU control registers */
DEFINE_COPROCR_RW_FUNCS(sdcr, SDCR)
DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(pmcr, PMCR)
295
/*
 * Address translation operations: the written value is the virtual
 * address to translate; the result is presumably reported in PAR —
 * see read64_par() below.
 */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)

DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

/* Coproc registers for 32bit AMU support */
DEFINE_COPROCR_READ_FUNC(amcfgr, AMCFGR)
DEFINE_COPROCR_READ_FUNC(amcgcr, AMCGCR)
DEFINE_COPROCR_RW_FUNCS(amcr, AMCR)

/* AMU counter enable set/clear registers */
DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

/* Coproc registers for 64bit AMU support */
DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)
325
/*
 * TLBI operation prototypes
 */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)

/*
 * BPI operation prototypes.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes. With the Cortex-A53 errata below in effect,
 * the "clean" operation is upgraded to clean+invalidate (DCCIMVAC) as
 * the workaround.
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
#if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
DEFINE_DCOP_PARAM_FUNC(cvac, DCCIMVAC)
#else
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
#endif

/*
 * DynamIQ Shared Unit power management
 */
DEFINE_COPROCR_RW_FUNCS(clusterpwrdn, CLUSTERPWRDN)
356
357 /*
358 * RNDR is AArch64 only, so just provide a placeholder here to make the
359 * linker happy.
360 */
361 static inline u_register_t read_rndr(void)
362 {
363 assert(1);
364
365 return 0;
366 }
367
/* Previously defined accessor functions with incomplete register names */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define dmbld()			dmb()
#endif

/* The core is in the Secure world when SCR.NS is clear. */
#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

/* Mode checks derived from the CPSR M[4:0] field */
#define IS_IN_HYP()       (GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()       (GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()       (GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()       IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) ||	\
		(IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))
388
/*
 * Report the exception level the core is currently running at (1, 2
 * or 3), derived from the CPSR mode bits and, for EL3, the SCR NS bit.
 */
static inline unsigned int get_current_el(void)
{
	unsigned int el = 1U;

	if (IS_IN_EL3()) {
		el = 3U;
	} else if (IS_IN_EL2()) {
		el = 2U;
	}

	return el;
}
399
/*
 * Macros for compatibility with AArch64 system registers: map the
 * AArch64-style names used by common code onto the AArch32 accessors
 * defined above.
 */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)
#define write_icc_sgi1r(_v)	write64_icc_sgi1r(_v)
#define write_icc_asgi1r(_v)	write64_icc_asgi1r(_v)

/* On AArch32 the exception-mask state lives in the CPSR */
#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0()	read_amcntenset0()
#define read_amcntenset1_el0()	read_amcntenset1()
432
/* Helper functions to manipulate CPSR */

/* Unmask IRQ exceptions (clear the CPSR I bit) on the calling core. */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier will prevent the compiler from
	 * scheduling non-volatile memory access after the write to the
	 * register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, in the assumption
	 * that it is safe as the interrupts are disabled at the time it does
	 * that (according to program order). However, non-volatile accesses
	 * are not necessarily in program order relatively with volatile inline
	 * assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	i");
	/* Synchronize the mask change before any following instruction. */
	isb();
}
452
/*
 * Unmask asynchronous aborts (clear the CPSR A bit); the barrier
 * ordering rationale is the same as in enable_irq().
 */
static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	a");
	isb();
}
459
/*
 * Unmask FIQ exceptions (clear the CPSR F bit); the barrier ordering
 * rationale is the same as in enable_irq().
 */
static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	f");
	isb();
}
466
/*
 * Mask IRQ exceptions (set the CPSR I bit); the barrier keeps
 * compiler-scheduled accesses on the intended side of the mask change.
 */
static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	i");
	isb();
}
473
/*
 * Mask asynchronous aborts (set the CPSR A bit); the barrier keeps
 * compiler-scheduled accesses on the intended side of the mask change.
 */
static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	a");
	isb();
}
480
/*
 * Mask FIQ exceptions (set the CPSR F bit); the barrier keeps
 * compiler-scheduled accesses on the intended side of the mask change.
 */
static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	f");
	isb();
}
487
488 #endif /* ARCH_HELPERS_H */
489