/*
 * FreeRTOS Kernel V11.1.0
 * Copyright (C) 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * https://www.FreeRTOS.org
 * https://github.com/FreeRTOS
 *
 */

/* Standard includes. */
#include <stdint.h>

/* Defining MPU_WRAPPERS_INCLUDED_FROM_API_FILE ensures that PRIVILEGED_FUNCTION
 * is defined correctly and privileged functions are placed in correct sections. */
#define MPU_WRAPPERS_INCLUDED_FROM_API_FILE

/* Portasm includes. */
#include "portasm.h"

/* System call numbers includes. */
#include "mpu_syscall_numbers.h"

/* MPU_WRAPPERS_INCLUDED_FROM_API_FILE is needed to be defined only for the
 * header files. */
#undef MPU_WRAPPERS_INCLUDED_FROM_API_FILE

#if ( configENABLE_MPU == 1 )

vRestoreContextOfFirstTask(void)48 void vRestoreContextOfFirstTask( void ) /* __attribute__ (( naked )) PRIVILEGED_FUNCTION */
49 {
50 __asm volatile
51 (
52 " .extern pxCurrentTCB \n"
53 " .syntax unified \n"
54 " \n"
55 " program_mpu_first_task: \n"
56 " \n"
57 " ldr r3, =pxCurrentTCB \n" /* r3 = &pxCurrentTCB. */
58 " ldr r0, [r3] \n" /* r0 = pxCurrentTCB.*/
59 " adds r0, #4 \n" /* r0 = Second item in the TCB which is xMPUSettings. */
60 " \n"
61 " dmb \n" /* Complete outstanding transfers before disabling MPU. */
62 " ldr r1, =0xe000ed94 \n" /* r1 = 0xe000ed94 [Location of MPU_CTRL]. */
63 " ldr r2, [r1] \n" /* Read the value of MPU_CTRL. */
64 " movs r3, #1 \n" /* r3 = 1. */
65 " bics r2, r3 \n" /* r2 = r2 & ~r3 i.e. Clear the bit 0 in r2. */
66 " str r2, [r1] \n" /* Disable MPU. */
67 " \n"
68 " ldr r1, =0xe000ed9c \n" /* r1 = 0xe000ed9c [Location of RBAR]. */
69 " ldr r2, =0xe000eda0 \n" /* r2 = 0xe000eda0 [Location of RASR]. */
70 " \n"
71 " ldmia r0!, {r3-r4} \n" /* Read first set of RBAR/RASR registers from TCB. */
72 " str r3, [r1] \n" /* Program RBAR. */
73 " str r4, [r2] \n" /* Program RASR. */
74 " \n"
75 " ldmia r0!, {r3-r4} \n" /* Read second set of RBAR/RASR registers from TCB. */
76 " str r3, [r1] \n" /* Program RBAR. */
77 " str r4, [r2] \n" /* Program RASR. */
78 " \n"
79 " ldmia r0!, {r3-r4} \n" /* Read third set of RBAR/RASR registers from TCB. */
80 " str r3, [r1] \n" /* Program RBAR. */
81 " str r4, [r2] \n" /* Program RASR. */
82 " \n"
83 " ldmia r0!, {r3-r4} \n" /* Read fourth set of RBAR/RASR registers from TCB. */
84 " str r3, [r1] \n" /* Program RBAR. */
85 " str r4, [r2] \n" /* Program RASR. */
86 " \n"
87 " ldmia r0!, {r3-r4} \n" /* Read fifth set of RBAR/RASR registers from TCB. */
88 " str r3, [r1] \n" /* Program RBAR. */
89 " str r4, [r2] \n" /* Program RASR. */
90 " \n"
91 " ldr r1, =0xe000ed94 \n" /* r1 = 0xe000ed94 [Location of MPU_CTRL]. */
92 " ldr r2, [r1] \n" /* Read the value of MPU_CTRL. */
93 " movs r3, #1 \n" /* r3 = 1. */
94 " orrs r2, r3 \n" /* r2 = r2 | r3 i.e. Set the bit 0 in r2. */
95 " str r2, [r1] \n" /* Enable MPU. */
96 " dsb \n" /* Force memory writes before continuing. */
97 " \n"
98 " restore_context_first_task: \n"
99 " ldr r2, =pxCurrentTCB \n" /* r2 = &pxCurrentTCB. */
100 " ldr r0, [r2] \n" /* r0 = pxCurrentTCB.*/
101 " ldr r1, [r0] \n" /* r1 = Location of saved context in TCB. */
102 " \n"
103 " restore_special_regs_first_task: \n"
104 " subs r1, #12 \n"
105 " ldmia r1!, {r2-r4} \n" /* r2 = original PSP, r3 = CONTROL, r4 = LR. */
106 " subs r1, #12 \n"
107 " msr psp, r2 \n"
108 " msr control, r3 \n"
109 " mov lr, r4 \n"
110 " \n"
111 " restore_general_regs_first_task: \n"
112 " subs r1, #32 \n"
113 " ldmia r1!, {r4-r7} \n" /* r4-r7 contain half of the hardware saved context. */
114 " stmia r2!, {r4-r7} \n" /* Copy half of the the hardware saved context on the task stack. */
115 " ldmia r1!, {r4-r7} \n" /* r4-r7 contain rest half of the hardware saved context. */
116 " stmia r2!, {r4-r7} \n" /* Copy rest half of the the hardware saved context on the task stack. */
117 " subs r1, #48 \n"
118 " ldmia r1!, {r4-r7} \n" /* Restore r8-r11. */
119 " mov r8, r4 \n" /* r8 = r4. */
120 " mov r9, r5 \n" /* r9 = r5. */
121 " mov r10, r6 \n" /* r10 = r6. */
122 " mov r11, r7 \n" /* r11 = r7. */
123 " subs r1, #32 \n"
124 " ldmia r1!, {r4-r7} \n" /* Restore r4-r7. */
125 " subs r1, #16 \n"
126 " \n"
127 " restore_context_done_first_task: \n"
128 " str r1, [r0] \n" /* Save the location where the context should be saved next as the first member of TCB. */
129 " bx lr \n"
130 " \n"
131 " .align 4 \n"
132 ::"i" ( portSVC_START_SCHEDULER ) : "memory"
133 );
134 }

#else /* configENABLE_MPU */

vRestoreContextOfFirstTask(void)138 void vRestoreContextOfFirstTask( void ) /* __attribute__ (( naked )) PRIVILEGED_FUNCTION */
139 {
140 __asm volatile
141 (
142 " .extern pxCurrentTCB \n"
143 " .syntax unified \n"
144 " \n"
145 " ldr r2, =pxCurrentTCB \n" /* r2 = &pxCurrentTCB. */
146 " ldr r1, [r2] \n" /* r1 = pxCurrentTCB.*/
147 " ldr r0, [r1] \n" /* Read top of stack from TCB - The first item in pxCurrentTCB is the task top of stack. */
148 " \n"
149 " ldm r0!, {r2} \n" /* Read from stack - r2 = EXC_RETURN. */
150 " movs r1, #2 \n" /* r1 = 2. */
151 " msr CONTROL, r1 \n" /* Switch to use PSP in the thread mode. */
152 " adds r0, #32 \n" /* Discard everything up to r0. */
153 " msr psp, r0 \n" /* This is now the new top of stack to use in the task. */
154 " isb \n"
155 " bx r2 \n" /* Finally, branch to EXC_RETURN. */
156 " \n"
157 " .align 4 \n"
158 );
159 }

#endif /* configENABLE_MPU */

/*-----------------------------------------------------------*/

xIsPrivileged(void)165 BaseType_t xIsPrivileged( void ) /* __attribute__ (( naked )) */
166 {
167 __asm volatile
168 (
169 " .syntax unified \n"
170 " \n"
171 " mrs r0, control \n" /* r0 = CONTROL. */
172 " movs r1, #1 \n" /* r1 = 1. */
173 " tst r0, r1 \n" /* Perform r0 & r1 (bitwise AND) and update the conditions flag. */
174 " beq running_privileged \n" /* If the result of previous AND operation was 0, branch. */
175 " movs r0, #0 \n" /* CONTROL[0]!=0. Return false to indicate that the processor is not privileged. */
176 " bx lr \n" /* Return. */
177 " running_privileged: \n"
178 " movs r0, #1 \n" /* CONTROL[0]==0. Return true to indicate that the processor is privileged. */
179 " bx lr \n" /* Return. */
180 " \n"
181 " .align 4 \n"
182 ::: "r0", "r1", "memory"
183 );
184 }

/*-----------------------------------------------------------*/

vRaisePrivilege(void)188 void vRaisePrivilege( void ) /* __attribute__ (( naked )) PRIVILEGED_FUNCTION */
189 {
190 __asm volatile
191 (
192 " .syntax unified \n"
193 " \n"
194 " mrs r0, control \n" /* Read the CONTROL register. */
195 " movs r1, #1 \n" /* r1 = 1. */
196 " bics r0, r1 \n" /* Clear the bit 0. */
197 " msr control, r0 \n" /* Write back the new CONTROL value. */
198 " bx lr \n" /* Return to the caller. */
199 ::: "r0", "r1", "memory"
200 );
201 }

/*-----------------------------------------------------------*/

vResetPrivilege(void)205 void vResetPrivilege( void ) /* __attribute__ (( naked )) */
206 {
207 __asm volatile
208 (
209 " .syntax unified \n"
210 " \n"
211 " mrs r0, control \n" /* r0 = CONTROL. */
212 " movs r1, #1 \n" /* r1 = 1. */
213 " orrs r0, r1 \n" /* r0 = r0 | r1. */
214 " msr control, r0 \n" /* CONTROL = r0. */
215 " bx lr \n" /* Return to the caller. */
216 ::: "r0", "r1", "memory"
217 );
218 }

/*-----------------------------------------------------------*/

vStartFirstTask(void)222 void vStartFirstTask( void ) /* __attribute__ (( naked )) PRIVILEGED_FUNCTION */
223 {
224 /* Don't reset the MSP stack as is done on CM3/4 devices. The reason is that
225 * the Vector Table Offset Register (VTOR) is optional in CM0+ architecture
226 * and therefore, may not be available on all the devices. */
227 __asm volatile
228 (
229 " .syntax unified \n"
230 " cpsie i \n" /* Globally enable interrupts. */
231 " dsb \n"
232 " isb \n"
233 " svc %0 \n" /* System call to start the first task. */
234 " nop \n"
235 " \n"
236 " .align 4 \n"
237 ::"i" ( portSVC_START_SCHEDULER ) : "memory"
238 );
239 }

/*-----------------------------------------------------------*/

ulSetInterruptMask(void)243 uint32_t ulSetInterruptMask( void ) /* __attribute__(( naked )) PRIVILEGED_FUNCTION */
244 {
245 __asm volatile
246 (
247 " .syntax unified \n"
248 " \n"
249 " mrs r0, PRIMASK \n"
250 " cpsid i \n"
251 " bx lr \n"
252 ::: "memory"
253 );
254 }

/*-----------------------------------------------------------*/

vClearInterruptMask(uint32_t ulMask)258 void vClearInterruptMask( __attribute__( ( unused ) ) uint32_t ulMask ) /* __attribute__(( naked )) PRIVILEGED_FUNCTION */
259 {
260 __asm volatile
261 (
262 " .syntax unified \n"
263 " \n"
264 " msr PRIMASK, r0 \n"
265 " bx lr \n"
266 ::: "memory"
267 );
268 }

/*-----------------------------------------------------------*/

#if ( configENABLE_MPU == 1 )

PendSV_Handler(void)274 void PendSV_Handler( void ) /* __attribute__ (( naked )) PRIVILEGED_FUNCTION */
275 {
276 __asm volatile
277 (
278 " .extern pxCurrentTCB \n"
279 " .syntax unified \n"
280 " \n"
281 " ldr r2, =pxCurrentTCB \n" /* r2 = &( pxCurrentTCB ). */
282 " ldr r0, [r2] \n" /* r0 = pxCurrentTCB. */
283 " ldr r1, [r0] \n" /* r1 = Location in TCB where the context should be saved. */
284 " mrs r2, psp \n" /* r2 = PSP. */
285 " \n"
286 " save_general_regs: \n"
287 " stmia r1!, {r4-r7} \n" /* Store r4-r7. */
288 " mov r4, r8 \n" /* r4 = r8. */
289 " mov r5, r9 \n" /* r5 = r9. */
290 " mov r6, r10 \n" /* r6 = r10. */
291 " mov r7, r11 \n" /* r7 = r11. */
292 " stmia r1!, {r4-r7} \n" /* Store r8-r11. */
293 " ldmia r2!, {r4-r7} \n" /* Copy half of the hardware saved context into r4-r7. */
294 " stmia r1!, {r4-r7} \n" /* Store the hardware saved context. */
295 " ldmia r2!, {r4-r7} \n" /* Copy rest half of the hardware saved context into r4-r7. */
296 " stmia r1!, {r4-r7} \n" /* Store the hardware saved context. */
297 " \n"
298 " save_special_regs: \n"
299 " mrs r2, psp \n" /* r2 = PSP. */
300 " mrs r3, control \n" /* r3 = CONTROL. */
301 " mov r4, lr \n" /* r4 = LR. */
302 " stmia r1!, {r2-r4} \n" /* Store original PSP (after hardware has saved context), CONTROL and LR. */
303 " str r1, [r0] \n" /* Save the location from where the context should be restored as the first member of TCB. */
304 " \n"
305 " select_next_task: \n"
306 " cpsid i \n"
307 " bl vTaskSwitchContext \n"
308 " cpsie i \n"
309 " \n"
310 " program_mpu: \n"
311 " \n"
312 " ldr r2, =pxCurrentTCB \n" /* r2 = &( pxCurrentTCB ). */
313 " ldr r0, [r2] \n" /* r0 = pxCurrentTCB. */
314 " adds r0, #4 \n" /* r0 = Second item in the TCB which is xMPUSettings. */
315 " \n"
316 " dmb \n" /* Complete outstanding transfers before disabling MPU. */
317 " ldr r1, =0xe000ed94 \n" /* r1 = 0xe000ed94 [Location of MPU_CTRL]. */
318 " ldr r2, [r1] \n" /* Read the value of MPU_CTRL. */
319 " movs r3, #1 \n" /* r3 = 1. */
320 " bics r2, r3 \n" /* r2 = r2 & ~r3 i.e. Clear the bit 0 in r2. */
321 " str r2, [r1] \n" /* Disable MPU */
322 " \n"
323 " ldr r1, =0xe000ed9c \n" /* r1 = 0xe000ed9c [Location of RBAR]. */
324 " ldr r2, =0xe000eda0 \n" /* r2 = 0xe000eda0 [Location of RASR]. */
325 " \n"
326 " ldmia r0!, {r3-r4} \n" /* Read first set of RBAR/RASR registers from TCB. */
327 " str r3, [r1] \n" /* Program RBAR. */
328 " str r4, [r2] \n" /* Program RASR. */
329 " \n"
330 " ldmia r0!, {r3-r4} \n" /* Read second set of RBAR/RASR registers from TCB. */
331 " str r3, [r1] \n" /* Program RBAR. */
332 " str r4, [r2] \n" /* Program RASR. */
333 " \n"
334 " ldmia r0!, {r3-r4} \n" /* Read third set of RBAR/RASR registers from TCB. */
335 " str r3, [r1] \n" /* Program RBAR. */
336 " str r4, [r2] \n" /* Program RASR. */
337 " \n"
338 " ldmia r0!, {r3-r4} \n" /* Read fourth set of RBAR/RASR registers from TCB. */
339 " str r3, [r1] \n" /* Program RBAR. */
340 " str r4, [r2] \n" /* Program RASR. */
341 " \n"
342 " ldmia r0!, {r3-r4} \n" /* Read fifth set of RBAR/RASR registers from TCB. */
343 " str r3, [r1] \n" /* Program RBAR. */
344 " str r4, [r2] \n" /* Program RASR. */
345 " \n"
346 " ldr r1, =0xe000ed94 \n" /* r1 = 0xe000ed94 [Location of MPU_CTRL]. */
347 " ldr r2, [r1] \n" /* Read the value of MPU_CTRL. */
348 " movs r3, #1 \n" /* r3 = 1. */
349 " orrs r2, r3 \n" /* r2 = r2 | r3 i.e. Set the bit 0 in r2. */
350 " str r2, [r1] \n" /* Enable MPU. */
351 " dsb \n" /* Force memory writes before continuing. */
352 " \n"
353 " restore_context: \n"
354 " ldr r2, =pxCurrentTCB \n" /* r2 = &pxCurrentTCB. */
355 " ldr r0, [r2] \n" /* r0 = pxCurrentTCB.*/
356 " ldr r1, [r0] \n" /* r1 = Location of saved context in TCB. */
357 " \n"
358 " restore_special_regs: \n"
359 " subs r1, #12 \n"
360 " ldmia r1!, {r2-r4} \n" /* r2 = original PSP, r3 = CONTROL, r4 = LR. */
361 " subs r1, #12 \n"
362 " msr psp, r2 \n"
363 " msr control, r3 \n"
364 " mov lr, r4 \n"
365 " \n"
366 " restore_general_regs: \n"
367 " subs r1, #32 \n"
368 " ldmia r1!, {r4-r7} \n" /* r4-r7 contain half of the hardware saved context. */
369 " stmia r2!, {r4-r7} \n" /* Copy half of the the hardware saved context on the task stack. */
370 " ldmia r1!, {r4-r7} \n" /* r4-r7 contain rest half of the hardware saved context. */
371 " stmia r2!, {r4-r7} \n" /* Copy rest half of the the hardware saved context on the task stack. */
372 " subs r1, #48 \n"
373 " ldmia r1!, {r4-r7} \n" /* Restore r8-r11. */
374 " mov r8, r4 \n" /* r8 = r4. */
375 " mov r9, r5 \n" /* r9 = r5. */
376 " mov r10, r6 \n" /* r10 = r6. */
377 " mov r11, r7 \n" /* r11 = r7. */
378 " subs r1, #32 \n"
379 " ldmia r1!, {r4-r7} \n" /* Restore r4-r7. */
380 " subs r1, #16 \n"
381 " \n"
382 " restore_context_done: \n"
383 " str r1, [r0] \n" /* Save the location where the context should be saved next as the first member of TCB. */
384 " bx lr \n"
385 " \n"
386 " .align 4 \n"
387 );
388 }

#else /* configENABLE_MPU */

PendSV_Handler(void)392 void PendSV_Handler( void ) /* __attribute__ (( naked )) PRIVILEGED_FUNCTION */
393 {
394 __asm volatile
395 (
396 " .extern pxCurrentTCB \n"
397 " .syntax unified \n"
398 " \n"
399 " mrs r0, psp \n" /* Read PSP in r0. */
400 " ldr r2, =pxCurrentTCB \n" /* r2 = &( pxCurrentTCB ). */
401 " ldr r1, [r2] \n" /* r1 = pxCurrentTCB. */
402 " subs r0, r0, #36 \n" /* Make space for LR and the remaining registers on the stack. */
403 " str r0, [r1] \n" /* Save the new top of stack in TCB. */
404 " \n"
405 " mov r3, lr \n" /* r3 = LR/EXC_RETURN. */
406 " stmia r0!, {r3-r7} \n" /* Store on the stack - LR and low registers that are not automatically saved. */
407 " mov r4, r8 \n" /* r4 = r8. */
408 " mov r5, r9 \n" /* r5 = r9. */
409 " mov r6, r10 \n" /* r6 = r10. */
410 " mov r7, r11 \n" /* r7 = r11. */
411 " stmia r0!, {r4-r7} \n" /* Store the high registers that are not saved automatically. */
412 " \n"
413 " cpsid i \n"
414 " bl vTaskSwitchContext \n"
415 " cpsie i \n"
416 " \n"
417 " ldr r2, =pxCurrentTCB \n" /* r2 = &( pxCurrentTCB ). */
418 " ldr r1, [r2] \n" /* r1 = pxCurrentTCB. */
419 " ldr r0, [r1] \n" /* The first item in pxCurrentTCB is the task top of stack. r0 now points to the top of stack. */
420 " \n"
421 " adds r0, r0, #20 \n" /* Move to the high registers. */
422 " ldmia r0!, {r4-r7} \n" /* Restore the high registers that are not automatically restored. */
423 " mov r8, r4 \n" /* r8 = r4. */
424 " mov r9, r5 \n" /* r9 = r5. */
425 " mov r10, r6 \n" /* r10 = r6. */
426 " mov r11, r7 \n" /* r11 = r7. */
427 " msr psp, r0 \n" /* Remember the new top of stack for the task. */
428 " subs r0, r0, #36 \n" /* Move to the starting of the saved context. */
429 " ldmia r0!, {r3-r7} \n" /* Read from stack - r3 = LR and r4-r7 restored. */
430 " bx r3 \n"
431 " \n"
432 " .align 4 \n"
433 );
434 }

#endif /* configENABLE_MPU */

/*-----------------------------------------------------------*/

#if ( ( configENABLE_MPU == 1 ) && ( configUSE_MPU_WRAPPERS_V1 == 0 ) )

SVC_Handler(void)442 void SVC_Handler( void ) /* __attribute__ (( naked )) PRIVILEGED_FUNCTION */
443 {
444 __asm volatile
445 (
446 " .syntax unified \n"
447 " .extern vPortSVCHandler_C \n"
448 " .extern vSystemCallEnter \n"
449 " .extern vSystemCallExit \n"
450 " .extern pxCurrentTCB \n"
451 " \n"
452 " movs r0, #4 \n"
453 " mov r1, lr \n"
454 " tst r0, r1 \n"
455 " beq stack_on_msp \n"
456 " \n"
457 " stack_on_psp: \n"
458 " mrs r0, psp \n"
459 " b route_svc \n"
460 " \n"
461 " stack_on_msp: \n"
462 " mrs r0, msp \n"
463 " b route_svc \n"
464 " \n"
465 " route_svc: \n"
466 " ldr r3, [r0, #24] \n"
467 " subs r3, #2 \n"
468 " ldrb r2, [r3, #0] \n"
469 " ldr r3, =%0 \n"
470 " cmp r2, r3 \n"
471 " blt system_call_enter \n"
472 " ldr r3, =%1 \n"
473 " cmp r2, r3 \n"
474 " beq system_call_exit \n"
475 " b vPortSVCHandler_C \n"
476 " \n"
477 " system_call_enter: \n"
478 " push {lr} \n"
479 " bl vSystemCallEnter \n"
480 " pop {pc} \n"
481 " \n"
482 " system_call_exit: \n"
483 " push {lr} \n"
484 " bl vSystemCallExit \n"
485 " pop {pc} \n"
486 " \n"
487 " .align 4 \n"
488 " \n"
489 : /* No outputs. */
490 : "i" ( NUM_SYSTEM_CALLS ), "i" ( portSVC_SYSTEM_CALL_EXIT )
491 : "r0", "r1", "r2", "r3", "memory"
492 );
493 }

#else /* ( configENABLE_MPU == 1 ) && ( configUSE_MPU_WRAPPERS_V1 == 0 ) */

SVC_Handler(void)497 void SVC_Handler( void ) /* __attribute__ (( naked )) PRIVILEGED_FUNCTION */
498 {
499 __asm volatile
500 (
501 " .syntax unified \n"
502 " .extern vPortSVCHandler_C \n"
503 " \n"
504 " movs r0, #4 \n"
505 " mov r1, lr \n"
506 " tst r0, r1 \n"
507 " beq stacking_used_msp \n"
508 " \n"
509 " stacking_used_psp: \n"
510 " mrs r0, psp \n"
511 " b vPortSVCHandler_C \n"
512 " \n"
513 " stacking_used_msp: \n"
514 " mrs r0, msp \n"
515 " b vPortSVCHandler_C \n"
516 " \n"
517 " .align 4 \n"
518 );
519 }

#endif /* ( configENABLE_MPU == 1 ) && ( configUSE_MPU_WRAPPERS_V1 == 0 ) */

/*-----------------------------------------------------------*/
