Lines Matching refs:__ASM

40 #ifndef   __ASM
41 #define __ASM __asm
117 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
198 __ASM volatile ("cpsie i" : : : "memory"); in __enable_irq()
209 __ASM volatile ("cpsid i" : : : "memory"); in __disable_irq()
222 __ASM volatile ("MRS %0, control" : "=r" (result) ); in __get_CONTROL()
237 __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); in __TZ_get_CONTROL_NS()
250 __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); in __set_CONTROL()
262 __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); in __TZ_set_CONTROL_NS()
276 __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); in __get_IPSR()
290 __ASM volatile ("MRS %0, apsr" : "=r" (result) ); in __get_APSR()
304 __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); in __get_xPSR()
318 __ASM volatile ("MRS %0, psp" : "=r" (result) ); in __get_PSP()
333 __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); in __TZ_get_PSP_NS()
346 __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); in __set_PSP()
358 __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); in __TZ_set_PSP_NS()
372 __ASM volatile ("MRS %0, msp" : "=r" (result) ); in __get_MSP()
387 __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); in __TZ_get_MSP_NS()
400 __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); in __set_MSP()
412 __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); in __TZ_set_MSP_NS()
427 __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); in __TZ_get_SP_NS()
439 __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); in __TZ_set_SP_NS()
453 __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory"); in __get_PRIMASK()
468 __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory"); in __TZ_get_PRIMASK_NS()
481 __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); in __set_PRIMASK()
493 __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); in __TZ_set_PRIMASK_NS()
508 __ASM volatile ("cpsie f" : : : "memory"); in __enable_fault_irq()
519 __ASM volatile ("cpsid f" : : : "memory"); in __disable_fault_irq()
532 __ASM volatile ("MRS %0, basepri" : "=r" (result) ); in __get_BASEPRI()
547 __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); in __TZ_get_BASEPRI_NS()
560 __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); in __set_BASEPRI()
572 __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); in __TZ_set_BASEPRI_NS()
585 __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); in __set_BASEPRI_MAX()
598 __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); in __get_FAULTMASK()
613 __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); in __TZ_get_FAULTMASK_NS()
626 __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); in __set_FAULTMASK()
638 __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); in __TZ_set_FAULTMASK_NS()
667 __ASM volatile ("MRS %0, psplim" : "=r" (result) ); in __get_PSPLIM()
688 __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); in __TZ_get_PSPLIM_NS()
711 __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); in __set_PSPLIM()
731 __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); in __TZ_set_PSPLIM_NS()
754 __ASM volatile ("MRS %0, msplim" : "=r" (result) ); in __get_MSPLIM()
776 __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); in __TZ_get_MSPLIM_NS()
799 __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); in __set_MSPLIM()
819 __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); in __TZ_set_MSPLIM_NS()
845 __ASM volatile ("VMRS %0, fpscr" : "=r" (result) ); in __get_FPSCR()
869 __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory"); in __set_FPSCR()
903 #define __NOP() __ASM volatile ("nop")
909 #define __WFI() __ASM volatile ("wfi")
917 #define __WFE() __ASM volatile ("wfe")
924 #define __SEV() __ASM volatile ("sev")
935 __ASM volatile ("isb 0xF":::"memory"); in __ISB()
946 __ASM volatile ("dsb 0xF":::"memory"); in __DSB()
957 __ASM volatile ("dmb 0xF":::"memory"); in __DMB()
974 __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); in __REV()
990 __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); in __REV16()
1008 __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); in __REVSH()
1039 #define __BKPT(value) __ASM volatile ("bkpt "#value)
1055 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) ); in __RBIT()
1112 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) ); in __LDREXB()
1117 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); in __LDREXB()
1134 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) ); in __LDREXH()
1139 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); in __LDREXH()
1155 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) ); in __LDREXW()
1172 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) ); in __STREXB()
1189 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) ); in __STREXH()
1206 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) ); in __STREXW()
1217 __ASM volatile ("clrex" ::: "memory"); in __CLREX()
1240 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1256 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1272 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); in __RRX()
1288 __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDRBT()
1293 __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" ); in __LDRBT()
1310 __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDRHT()
1315 __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" ); in __LDRHT()
1331 __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDRT()
1344 __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); in __STRBT()
1356 __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); in __STRHT()
1368 __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) ); in __STRT()
1441 __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDAB()
1456 __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDAH()
1471 __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDA()
1484 __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); in __STLB()
1496 __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); in __STLH()
1508 __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); in __STL()
1522 __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDAEXB()
1537 __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDAEXH()
1552 __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) ); in __LDAEX()
1569 __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) ); in __STLEXB()
1586 __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) ); in __STLEXH()
1603 __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) ); in __STLEX()
1625 __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SADD8()
1633 __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QADD8()
1641 __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHADD8()
1649 __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UADD8()
1657 __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQADD8()
1665 __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHADD8()
1674 __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SSUB8()
1682 __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QSUB8()
1690 __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHSUB8()
1698 __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __USUB8()
1706 __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQSUB8()
1714 __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHSUB8()
1723 __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SADD16()
1731 __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QADD16()
1739 __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHADD16()
1747 __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UADD16()
1755 __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQADD16()
1763 __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHADD16()
1771 __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SSUB16()
1779 __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QSUB16()
1787 __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHSUB16()
1795 __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __USUB16()
1803 __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQSUB16()
1811 __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHSUB16()
1819 __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SASX()
1827 __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QASX()
1835 __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHASX()
1843 __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UASX()
1851 __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQASX()
1859 __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHASX()
1867 __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SSAX()
1875 __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QSAX()
1883 __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHSAX()
1891 __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __USAX()
1899 __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQSAX()
1907 __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHSAX()
1915 __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __USAD8()
1923 __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __USADA8()
1930 __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1937 __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1945 __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1)); in __UXTB16()
1953 __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UXTAB16()
1961 __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1)); in __SXTB16()
1969 __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SXTAB16()
1977 __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SMUAD()
1985 __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SMUADX()
1993 __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __SMLAD()
2001 __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __SMLADX()
2014__ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op… in __SMLALD()
2016__ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op… in __SMLALD()
2031__ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (o… in __SMLALDX()
2033__ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (o… in __SMLALDX()
2043 __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SMUSD()
2051 __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SMUSDX()
2059 __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __SMLSD()
2067 __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __SMLSDX()
2080__ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op… in __SMLSLD()
2082__ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op… in __SMLSLD()
2097__ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (o… in __SMLSLDX()
2099__ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (o… in __SMLSLDX()
2109 __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SEL()
2117 __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QADD()
2125 __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QSUB()
2133 __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2141 __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
2143 __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2158 __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) ); in __SMMLA()