| /cmsis_6-latest/CMSIS/Core/Include/m-profile/ |
| D | cmsis_gcc_m.h |
|   131  __ASM volatile ("MRS %0, control" : "=r" (result) );  in __get_CONTROL()
|   146  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );  in __TZ_get_CONTROL_NS()
|   159  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");  in __set_CONTROL()
|   172  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");  in __TZ_set_CONTROL_NS()
|   187  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );  in __get_IPSR()
|   201  __ASM volatile ("MRS %0, apsr" : "=r" (result) );  in __get_APSR()
|   215  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );  in __get_xPSR()
|   229  __ASM volatile ("MRS %0, psp" : "=r" (result) );  in __get_PSP()
|   244  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );  in __TZ_get_PSP_NS()
|   257  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );  in __set_PSP()
|   [all …]
|
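The cmsis_gcc_m.h matches above show the standard CMSIS pattern for M-profile special registers: each accessor is a static inline function wrapping a single MRS or MSR instruction, with a "memory" clobber on the writes that can change privilege or the active stack pointer. A minimal sketch of that pattern (the my_ names are illustrative, not the CMSIS definitions):

    #include <stdint.h>

    /* Read the CONTROL special register, mirroring the __get_CONTROL() match above. */
    static inline uint32_t my_get_CONTROL(void)
    {
      uint32_t result;
      __asm volatile ("MRS %0, control" : "=r" (result));
      return result;
    }

    /* Write CONTROL; the "memory" clobber stops the compiler from moving memory
       accesses across the privilege/stack-pointer change, and an ISB is commonly
       issued afterwards so the new setting takes effect before the next instruction. */
    static inline void my_set_CONTROL(uint32_t control)
    {
      __asm volatile ("MSR control, %0" : : "r" (control) : "memory");
      __asm volatile ("ISB" ::: "memory");
    }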
| D | cmsis_armclang_m.h |
|    88  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRBT()
|   100  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRHT()
|   112  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );  in __STRT()
|   132  __ASM volatile ("MRS %0, control" : "=r" (result) );  in __get_CONTROL()
|   147  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );  in __TZ_get_CONTROL_NS()
|   160  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");  in __set_CONTROL()
|   173  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");  in __TZ_set_CONTROL_NS()
|   188  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );  in __get_IPSR()
|   202  __ASM volatile ("MRS %0, apsr" : "=r" (result) );  in __get_APSR()
|   216  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );  in __get_xPSR()
|   [all …]
|
| D | cmsis_clang_m.h |
|    94  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRBT()
|   106  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRHT()
|   118  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );  in __STRT()
|   138  __ASM volatile ("MRS %0, control" : "=r" (result) );  in __get_CONTROL()
|   153  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );  in __TZ_get_CONTROL_NS()
|   166  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");  in __set_CONTROL()
|   179  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");  in __TZ_set_CONTROL_NS()
|   194  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );  in __get_IPSR()
|   208  __ASM volatile ("MRS %0, apsr" : "=r" (result) );  in __get_APSR()
|   222  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );  in __get_xPSR()
|   [all …]
|
| D | cmsis_tiarmclang_m.h |
|    35  #ifndef __ASM
|    36  #define __ASM __asm  macro
|   100  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
|   268  #define __BKPT(value) __ASM volatile ("bkpt "#value)
|   449  __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));  in __RRX()
|   464  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRBT()
|   479  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRHT()
|   494  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRT()
|   507  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRBT()
|   519  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRHT()
|   [all …]
|
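Across the M-profile compiler headers the strbt/strht/strt and ldrbt/ldrht/ldrt matches implement the unprivileged load/store intrinsics: the *T instruction forms perform the access with unprivileged permissions even when executed from privileged handler code, which is how a service routine can safely dereference a pointer handed in by an unprivileged thread. A sketch of the byte variants using the same "Q" memory constraint seen above (my_ names are illustrative):

    #include <stdint.h>

    /* Unprivileged byte store: STRBT performs the store with unprivileged
       permissions even when executed in privileged (e.g. handler) mode. */
    static inline void my_STRBT(uint8_t value, volatile uint8_t *ptr)
    {
      __asm volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value));
    }

    /* Matching unprivileged byte load (LDRBT). */
    static inline uint8_t my_LDRBT(volatile uint8_t *ptr)
    {
      uint32_t result;
      __asm volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr));
      return (uint8_t)result;
    }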
| D | armv81m_pac.h |
|    48  __ASM volatile (  in __get_PAC_KEY_P()
|    67  __ASM volatile (  in __set_PAC_KEY_P()
|    86  __ASM volatile (  in __get_PAC_KEY_U()
|   105  __ASM volatile (  in __set_PAC_KEY_U()
|   126  __ASM volatile (  in __TZ_get_PAC_KEY_P_NS()
|   145  __ASM volatile (  in __TZ_set_PAC_KEY_P_NS()
|   164  __ASM volatile (  in __TZ_get_PAC_KEY_U_NS()
|   183  __ASM volatile (  in __TZ_set_PAC_KEY_U_NS()
|
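armv81m_pac.h wraps the Armv8.1-M PACBTI key registers; each accessor is a multi-line __ASM block moving the four 32-bit words of a key with MRS or MSR. The matches only show the opening of each block, so the sketch below is an assumption about the shape of the read side; the PAC_KEY_P_0..PAC_KEY_P_3 register names require a PACBTI-aware assembler, and the real CMSIS prototype may differ:

    #include <stdint.h>

    /* Read the privileged PAC key into a caller-provided 4-word buffer.
       Register names assume an Armv8.1-M PACBTI-capable toolchain. */
    static inline void my_get_PAC_KEY_P(uint32_t *key /* uint32_t[4] */)
    {
      __asm volatile (
        "MRS %0, PAC_KEY_P_0\n"
        "MRS %1, PAC_KEY_P_1\n"
        "MRS %2, PAC_KEY_P_2\n"
        "MRS %3, PAC_KEY_P_3\n"
        : "=r" (key[0]), "=r" (key[1]), "=r" (key[2]), "=r" (key[3])
      );
    }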
| /cmsis_6-latest/CMSIS/Core/Include/ |
| D | cmsis_gcc.h |
|    38  #ifndef __ASM
|    39  #define __ASM __asm  macro
|   110  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
|   142  #define __NOP() __ASM volatile ("nop")
|   149  #define __WFI() __ASM volatile ("wfi":::"memory")
|   157  #define __WFE() __ASM volatile ("wfe":::"memory")
|   164  #define __SEV() __ASM volatile ("sev")
|   175  __ASM volatile ("isb 0xF":::"memory");  in __ISB()
|   186  __ASM volatile ("dsb 0xF":::"memory");  in __DSB()
|   197  __ASM volatile ("dmb 0xF":::"memory");  in __DMB()
|   [all …]
|
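cmsis_gcc.h supplies the architecture-independent hint and barrier intrinsics as one-instruction macros or inline functions, adding a "memory" clobber wherever the instruction is meant to order memory accesses. A small usage sketch; DEVICE_IRQ_ENABLE is a made-up register used only to show the DSB/WFI/ISB sequencing:

    #include <stdint.h>
    #include "cmsis_compiler.h"   /* normally pulled in via the device header */

    /* Hypothetical memory-mapped register, for illustration only. */
    #define DEVICE_IRQ_ENABLE (*(volatile uint32_t *)0x40000000u)

    void idle_until_interrupt(void)
    {
      DEVICE_IRQ_ENABLE = 1u;   /* enable the wake-up source                   */
      __DSB();                  /* ensure the write has completed before sleep */
      __WFI();                  /* sleep until an interrupt arrives            */
      __ISB();                  /* pipeline flush after wake-up                */
    }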
| D | cmsis_iccarm.h |
|    37  #ifndef __ASM
|    38  #define __ASM __asm  macro
|   182  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
|   247  __ASM volatile("CLREX" ::: "memory");  in __CLREX()
|   351  __ASM volatile ("LDRBT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");  in __LDRBT()
|   358  __ASM volatile ("LDRHT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");  in __LDRHT()
|   365  __ASM volatile ("LDRT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");  in __LDRT()
|   371  __ASM volatile ("STRBT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");  in __STRBT()
|   376  __ASM volatile ("STRHT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");  in __STRHT()
|   381  __ASM volatile ("STRT %1, [%0]" : : "r" (addr), "r" (value) : "memory");  in __STRT()
|   [all …]
|
| D | cmsis_armclang.h |
|    37  #ifndef __ASM
|    38  #define __ASM __asm  macro
|   105  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
|   231  #define __BKPT(value) __ASM volatile ("bkpt "#value)
|   412  __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));  in __RRX()
|   427  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRBT()
|   442  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRHT()
|   457  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRT()
|   474  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );  in __LDAB()
|   489  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );  in __LDAH()
|   [all …]
|
| D | cmsis_clang.h |
|    42  #ifndef __ASM
|    43  #define __ASM __asm  macro
|   110  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
|   236  #define __BKPT(value) __ASM volatile ("bkpt "#value)
|   417  __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));  in __RRX()
|   432  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRBT()
|   447  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRHT()
|   462  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );  in __LDRT()
|   479  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );  in __LDAB()
|   494  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );  in __LDAH()
|   [all …]
|
| D | cmsis_compiler.h |
|    68  #ifndef __ASM
|    69  #define __ASM __asm  macro
|   141  #ifndef __ASM
|   142  #define __ASM __asm  macro
|   211  #ifndef __ASM
|   212  #define __ASM _asm  macro
|
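cmsis_compiler.h itself contains no assembly: it selects the compiler-specific header and only defines __ASM (and friends) as a fallback for toolchains without one, which is why the same #ifndef __ASM / #define pattern appears three times, once per fallback branch (the branch at line 212 spells it _asm). A reduced sketch of that dispatch-and-default structure, not the literal header contents:

    /* Reduced sketch of the cmsis_compiler.h pattern: pick the header for the
       detected toolchain, otherwise fall back to defining the keyword directly.
       (The real header handles more compilers and more feature macros.)       */
    #if   defined ( __ICCARM__ )
      #include "cmsis_iccarm.h"
    #elif defined ( __ARMCC_VERSION ) && ( __ARMCC_VERSION >= 6010050 )
      #include "cmsis_armclang.h"
    #elif defined ( __clang__ )
      #include "cmsis_clang.h"
    #elif defined ( __GNUC__ )
      #include "cmsis_gcc.h"
    #else
      #ifndef   __ASM
        #define __ASM  __asm          /* default inline-assembly keyword */
      #endif
    #endif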
| /cmsis_6-latest/CMSIS/Core/Include/a-profile/ |
| D | cmsis_armclang_a.h |
|    40  __ASM volatile ("strbt %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRBT()
|    52  __ASM volatile ("strht %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRHT()
|    64  __ASM volatile ("strt %1, %0, #0" : "=Q" (*ptr) : "r" (value) );  in __STRT()
|   140  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
|   149  __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
|   151  __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
|   160  __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));  in __SXTB16_RORn()
|   174  … __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));  in __SXTAB16_RORn()
|   187  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );  in __SMMLA()
|   206  __ASM volatile("MRS %0, cpsr" : "=r" (result) );  in __get_CPSR()
|   [all …]
|
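The A-profile headers add DSP-style helpers: PKHBT/PKHTB packing macros, SXTB16/SXTAB16 with a rotation, and SMMLA. As a concrete reading of the smmla match, a stand-alone wrapper for "multiply, keep the high word, accumulate" (the my_ name is illustrative):

    #include <stdint.h>

    /* SMMLA: result = op3 + (((int64_t)op1 * op2) >> 32), i.e. a 32x32->64
       multiply whose top word is accumulated into op3. */
    static inline int32_t my_SMMLA(int32_t op1, int32_t op2, int32_t op3)
    {
      int32_t result;
      __asm volatile ("smmla %0, %1, %2, %3"
                      : "=r" (result)
                      : "r" (op1), "r" (op2), "r" (op3));
      return result;
    }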
| D | cmsis_clang_a.h |
|    39  __ASM volatile ("strbt %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRBT()
|    51  __ASM volatile ("strht %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );  in __STRHT()
|    63  __ASM volatile ("strt %1, %0, #0" : "=Q" (*ptr) : "r" (value) );  in __STRT()
|   136  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
|   145  __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
|   147  __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
|   156  __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));  in __SXTB16_RORn()
|   170  … __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));  in __SXTAB16_RORn()
|   183  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );  in __SMMLA()
|   202  __ASM volatile("MRS %0, cpsr" : "=r" (result) );  in __get_CPSR()
|   [all …]
|
| D | cmsis_gcc_a.h |
|    44  __ASM volatile("MRS %0, cpsr" : "=r" (result) );  in __get_CPSR()
|    53  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");  in __set_CPSR()
|    69  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");  in __set_mode()
|    78  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");  in __get_SP()
|    87  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");  in __set_SP()
|    97  __ASM volatile(  in __get_SP_usr()
|   112  __ASM volatile(  in __set_SP_usr()
|   127  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");  in __get_FPEXC()
|   140  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");  in __set_FPEXC()
|   148  #define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn…
|   [all …]
|
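The __get_CP/__set_CP macros at the end of cmsis_gcc_a.h expand to MRC/MCR coprocessor accesses, and the higher-level CP15 helpers are built by filling in the register encoding. A usage sketch assuming the (cp, op1, Rt, CRn, CRm, op2) parameter order shown in the match; c0, c0, 5 is the standard CP15 encoding of MPIDR:

    #include <stdint.h>
    #include "cmsis_compiler.h"   /* provides __get_CP on A-profile targets */

    /* Read the Multiprocessor Affinity Register (MPIDR) through the generic
       coprocessor-access macro: MRC p15, 0, Rt, c0, c0, 5. */
    static inline uint32_t read_mpidr(void)
    {
      uint32_t mpidr;
      __get_CP(15, 0, mpidr, 0, 0, 5);
      return mpidr;
    }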
| D | cmsis_iccarm_a.h |
|    55  __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
|    58  __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
|    66  __ASM volatile(  in __get_SP_usr()
|    79  __ASM volatile(  in __set_SP_usr()
|    93  __ASM volatile(  in __FPU_Enable()
|
| /cmsis_6-latest/CMSIS/Core/Include/r-profile/ |
| D | cmsis_armclang_r.h |
|    47  __ASM volatile("MRS %0, cpsr" : "=r" (result) );  in __get_CPSR()
|    56  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");  in __set_CPSR()
|    72  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");  in __set_mode()
|    81  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");  in __get_SP()
|    90  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");  in __set_SP()
|   100  __ASM volatile(  in __get_SP_usr()
|   116  __ASM volatile(  in __set_SP_usr()
|   132  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");  in __get_FPEXC()
|   145  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");  in __set_FPEXC()
|   156  #define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn…
|   [all …]
|
| D | cmsis_clang_r.h |
|    47  __ASM volatile("MRS %0, cpsr" : "=r" (result) );  in __get_CPSR()
|    56  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");  in __set_CPSR()
|    72  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");  in __set_mode()
|    81  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");  in __get_SP()
|    90  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");  in __set_SP()
|   100  __ASM volatile(  in __get_SP_usr()
|   116  __ASM volatile(  in __set_SP_usr()
|   132  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");  in __get_FPEXC()
|   145  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");  in __set_FPEXC()
|   156  #define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn…
|   [all …]
|
| D | cmsis_gcc_r.h |
|    50  __ASM volatile("MRS %0, cpsr" : "=r" (result) );  in __get_CPSR()
|    59  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");  in __set_CPSR()
|    75  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");  in __set_mode()
|    84  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");  in __get_SP()
|    93  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");  in __set_SP()
|   103  __ASM volatile(  in __get_SP_usr()
|   118  __ASM volatile(  in __set_SP_usr()
|   133  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");  in __get_FPEXC()
|   146  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");  in __set_FPEXC()
|   154  #define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn…
|   [all …]
|
| D | cmsis_iccarm_r.h |
|    54  __ASM volatile("MRRC p" #cp ", " #op1 ", %Q0, %R0, c" #CRm \
|    60  __ASM volatile("MCRR p" #cp ", " #op1 ", %Q0, %R0, c" #CRm \
|    69  __ASM volatile("MRS %0, cpsr \n"  in __get_SP_usr()
|    82  __ASM volatile("MRS %0, cpsr \n"  in __set_SP_usr()
|    96  __ASM volatile(  in __FPU_Enable()
|
| /cmsis_6-latest/CMSIS/CoreValidation/Source/ |
| D | CV_CoreAFunc.c |
|    65  #define __SUBS(Rd, Rm, Rn) __ASM volatile("SUBS " # Rd ", " # Rm ", " # Rn)
|    66  #define __ADDS(Rd, Rm, Rn) __ASM volatile("ADDS " # Rd ", " # Rm ", " # Rn)
|    68  #define __SUBS(Rd, Rm, Rn) __ASM volatile("SUB %0, %1, %2" : "=r"(Rd) : "r"(Rm), "r"(Rn))
|    69  #define __ADDS(Rd, Rm, Rn) __ASM volatile("ADD %0, %1, %2" : "=r"(Rd) : "r"(Rm), "r"(Rn))
|    71  #define __SUBS(Rd, Rm, Rn) __ASM volatile("SUBS %0, %1, %2" : "=r"(Rd) : "r"(Rm), "r"(Rn))
|    72  #define __ADDS(Rd, Rm, Rn) __ASM volatile("ADDS %0, %1, %2" : "=r"(Rd) : "r"(Rm), "r"(Rn))
|
| D | CV_CoreFunc.c |
|   311  #define SUBS(Rd, Rm, Rn) __ASM volatile("SUBS " # Rd ", " # Rm ", " # Rn)
|   312  #define ADDS(Rd, Rm, Rn) __ASM volatile("ADDS " # Rd ", " # Rm ", " # Rn)
|   319  #define SUBS(Rd, Rm, Rn) __ASM volatile("SUBS %0, %1, %2" : "=r"(Rd) : "r"(Rm), "r"(Rn) : "cc")
|   320  #define ADDS(Rd, Rm, Rn) __ASM volatile("ADDS %0, %1, %2" : "=r"(Rd) : "r"(Rm), "r"(Rn) : "cc")
|
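The validation sources keep several flavours of SUBS/ADDS macros (string-pasted register names for assembler-only toolchains, operand-constraint forms with an explicit "cc" clobber elsewhere) so the tests can drive the condition flags into a known state before reading them back through the PSR accessors. A hedged sketch of that kind of check, not the actual test code:

    #include <stdint.h>
    #include "cmsis_compiler.h"            /* __ASM and, on M-profile builds, __get_APSR() */

    /* Constraint-based flag-setting add, like the CV_CoreFunc.c fallback at 319/320:
       ADDS updates N, Z, C, V; "cc" tells the compiler the flags are clobbered. */
    #define ADDS(Rd, Rm, Rn) \
      __ASM volatile ("ADDS %0, %1, %2" : "=r" (Rd) : "r" (Rm), "r" (Rn) : "cc")

    /* Returns APSR.C (bit 29) after adding a and b. This is fragile by nature:
       the compiler may schedule flag-changing instructions between the ADDS and
       the APSR read, which is why the validation code keeps several variants. */
    static uint32_t add_sets_carry(uint32_t a, uint32_t b)
    {
      uint32_t sum;
      ADDS(sum, a, b);
      (void)sum;
      return (__get_APSR() >> 29U) & 1U;
    }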
| /cmsis_6-latest/CMSIS/CoreValidation/Layer/Target/CA9/RTE/Device/ARMCA9/ |
| D | startup_ARMCA9.c |
|    59  __ASM volatile(  in Vectors()
|    75  __ASM volatile(  in Reset_Handler()
|
| /cmsis_6-latest/CMSIS/CoreValidation/Layer/Target/CA7/RTE/Device/ARMCA7/ |
| D | startup_ARMCA7.c |
|    59  __ASM volatile(  in Vectors()
|    75  __ASM volatile(  in Reset_Handler()
|
| /cmsis_6-latest/CMSIS/CoreValidation/Layer/Target/CA5/RTE/Device/ARMCA5/ |
| D | startup_ARMCA5.c |
|    59  __ASM volatile(  in Vectors()
|    75  __ASM volatile(  in Reset_Handler()
|
| /cmsis_6-latest/CMSIS/Core/Template/Device_A/Source/ |
| D | startup_Device.c |
|    57  __ASM void Vectors(void) {  in Vectors()
|    77  __ASM void Reset_Handler(void) {  in Reset_Handler()
|
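In the Cortex-A startup files and the Device_A template, Vectors() and Reset_Handler() are C functions whose bodies are a single __ASM block (the template uses the older __ASM-qualified function syntax), because A-profile exception vectors are instructions rather than a table of addresses. A compressed sketch of the GCC/Clang-style variant; handler names, the stack symbol and the section name are placeholders:

    #include "cmsis_compiler.h"   /* provides __ASM */

    /* Vector table as code: one branch instruction per exception entry. */
    __attribute__((naked, section(".vectors")))
    void Vectors(void)
    {
      __ASM volatile(
        "LDR    PC, =Reset_Handler   \n"
        "LDR    PC, =Undef_Handler   \n"
        "LDR    PC, =SVC_Handler     \n"
        "LDR    PC, =PAbt_Handler    \n"
        "LDR    PC, =DAbt_Handler    \n"
        "NOP                         \n"   /* reserved entry */
        "LDR    PC, =IRQ_Handler     \n"
        "LDR    PC, =FIQ_Handler     \n"
      );
    }

    __attribute__((naked))
    void Reset_Handler(void)
    {
      __ASM volatile(
        "LDR    SP, =__StackTop      \n"   /* placeholder stack-top symbol            */
        "BL     SystemInit           \n"
        "BL     main                 \n"   /* run-time library init omitted in sketch */
        "B      .                    \n"   /* should not return                       */
      );
    }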
| /cmsis_6-latest/CMSIS/Documentation/Doxygen/Core/src/ |
| D | misra.md |
|     8  …s assembly statements to access core registers on several places. These locations start with __ASM.
|