Lines Matching refs:MMAU__SAT
109 #define MMAU__SAT 0xF0004800UL /*!< Saturation */ macro
464 *((uint64_t volatile *)(MMAU__UMULD | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_umuls_dl()
465 *((uint32_t volatile *)(MMAU__UMULD | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_umuls_dl()
466 return *((uint64_t volatile *)(MMAU__UMULD | MMAU__A10 | MMAU__SAT)); in MMAU_d_umuls_dl()
501 *((uint32_t volatile *)(MMAU__UMULDA | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_umulas_l()
502 return *((uint64_t volatile *)(MMAU__UMULDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_umulas_l()
541 *((uint32_t volatile *)(MMAU__UMAC | MMAU__X2 | MMAU__SAT)) = lval1; in MMAU_d_umacs_ll()
542 *((uint32_t volatile *)(MMAU__UMAC | MMAU__X3 | MMAU__SAT)) = lval2; in MMAU_d_umacs_ll()
543 return *((uint64_t volatile *)(MMAU__UMAC | MMAU__A10 | MMAU__SAT)); in MMAU_d_umacs_ll()
584 *((uint64_t volatile *)(MMAU__UMACD | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_umacs_dl()
585 *((uint32_t volatile *)(MMAU__UMACD | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_umacs_dl()
586 return *((uint64_t volatile *)(MMAU__UMACD | MMAU__A10 | MMAU__SAT)); in MMAU_d_umacs_dl()
627 *((uint64_t volatile *)(MMAU__UMACDA | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_umacas_dl()
628 *((uint32_t volatile *)(MMAU__UMACDA | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_umacas_dl()
629 return *((uint64_t volatile *)(MMAU__UMACDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_umacas_dl()
850 *((int64_t volatile *)(MMAU__SMULD | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_smuls_dl()
851 *((int32_t volatile *)(MMAU__SMULD | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_smuls_dl()
852 return *((int64_t volatile *)(MMAU__SMULD | MMAU__A10 | MMAU__SAT)); in MMAU_d_smuls_dl()
887 *((int32_t volatile *)(MMAU__SMULDA | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_smulas_l()
888 return *((int64_t volatile *)(MMAU__SMULDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_smulas_l()
927 *((int32_t volatile *)(MMAU__SMAC | MMAU__X2 | MMAU__SAT)) = lval1; in MMAU_d_smacs_ll()
928 *((int32_t volatile *)(MMAU__SMAC | MMAU__X3 | MMAU__SAT)) = lval2; in MMAU_d_smacs_ll()
929 return *((int64_t volatile *)(MMAU__SMAC | MMAU__A10 | MMAU__SAT)); in MMAU_d_smacs_ll()
972 *((int64_t volatile *)(MMAU__SMACD | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_smacs_dl()
973 *((int32_t volatile *)(MMAU__SMACD | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_smacs_dl()
974 return *((int64_t volatile *)(MMAU__SMACD | MMAU__A10 | MMAU__SAT)); in MMAU_d_smacs_dl()
1016 *((int64_t volatile *)(MMAU__SMACDA | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_smacas_dl()
1017 *((int32_t volatile *)(MMAU__SMACDA | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_smacas_dl()
1018 return *((int64_t volatile *)(MMAU__SMACDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_smacas_dl()
1053 *((int32_t volatile *)(MMAU__SDIV | MMAU__X2 | MMAU__SAT)) = lnum; in MMAU_l_sdivs_ll()
1054 *((int32_t volatile *)(MMAU__SDIV | MMAU__X3 | MMAU__SAT)) = lden; in MMAU_l_sdivs_ll()
1055 return *((int32_t volatile *)(MMAU__SDIV | MMAU__A0 | MMAU__SAT)); in MMAU_l_sdivs_ll()
1090 *((int64_t volatile *)(MMAU__SDIVD | MMAU__X1 | MMAU__SAT)) = dnum; in MMAU_d_sdivs_dl()
1091 *((int32_t volatile *)(MMAU__SDIVD | MMAU__X3 | MMAU__SAT)) = lden; in MMAU_d_sdivs_dl()
1092 return *((int64_t volatile *)(MMAU__SDIVD | MMAU__A10 | MMAU__SAT)); in MMAU_d_sdivs_dl()
1127 *((int64_t volatile *)(MMAU__SDIVDD | MMAU__X0 | MMAU__SAT)) = dnum; in MMAU_d_sdivs_dd()
1128 *((int64_t volatile *)(MMAU__SDIVDD | MMAU__X2 | MMAU__SAT)) = dden; in MMAU_d_sdivs_dd()
1129 return *((int64_t volatile *)(MMAU__SDIVDD | MMAU__A10 | MMAU__SAT)); in MMAU_d_sdivs_dd()
1163 *((int32_t volatile *)(MMAU__SDIVDA | MMAU__X3 | MMAU__SAT)) = lden1; in MMAU_d_sdivas_l()
1164 return *((int64_t volatile *)(MMAU__SDIVDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_sdivas_l()
1198 *((int64_t volatile *)(MMAU__SDIVDDA | MMAU__X2 | MMAU__SAT)) = dden1; in MMAU_d_sdivas_d()
1199 return *((int64_t volatile *)(MMAU__SDIVDDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_sdivas_d()
1253 *((frac32_t volatile *)(MMAU__QMUL | MMAU__X2 | MMAU__SAT)) = lval1; in MMAU_l_muls_ll()
1254 *((frac32_t volatile *)(MMAU__QMUL | MMAU__X3 | MMAU__SAT)) = lval2; in MMAU_l_muls_ll()
1255 return *((frac32_t volatile *)(MMAU__QMUL | MMAU__A1 | MMAU__SAT)); in MMAU_l_muls_ll()
1292 *((frac32_t volatile *)(MMAU__QMUL | MMAU__X2 | MMAU__SAT)) = lval1; in MMAU_d_muls_ll()
1293 *((frac32_t volatile *)(MMAU__QMUL | MMAU__X3 | MMAU__SAT)) = lval2; in MMAU_d_muls_ll()
1294 return *((frac64_t volatile *)(MMAU__QMUL | MMAU__A10 | MMAU__SAT)); in MMAU_d_muls_ll()
1331 *((frac64_t volatile *)(MMAU__QMULD | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_muls_dl()
1332 *((frac32_t volatile *)(MMAU__QMULD | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_muls_dl()
1333 return *((frac64_t volatile *)(MMAU__QMULD | MMAU__A10 | MMAU__SAT)); in MMAU_d_muls_dl()
1370 *((frac32_t volatile *)(MMAU__QMULDA | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_mulas_l()
1371 return *((frac64_t volatile *)(MMAU__QMULDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_mulas_l()
1407 *((frac64_t volatile *)(MMAU__QMULD | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_l_muls_dl()
1408 *((frac32_t volatile *)(MMAU__QMULD | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_l_muls_dl()
1409 return *((frac32_t volatile *)(MMAU__QMULD | MMAU__A1 | MMAU__SAT)); in MMAU_l_muls_dl()
1445 *((frac32_t volatile *)(MMAU__QMULDA | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_l_mulas_l()
1446 return *((frac32_t volatile *)(MMAU__QMULDA | MMAU__A1 | MMAU__SAT)); in MMAU_l_mulas_l()
1487 *((frac32_t volatile *)(MMAU__QMAC | MMAU__X2 | MMAU__SAT)) = lval1; in MMAU_d_macs_ll()
1488 *((frac32_t volatile *)(MMAU__QMAC | MMAU__X3 | MMAU__SAT)) = lval2; in MMAU_d_macs_ll()
1489 return *((frac64_t volatile *)(MMAU__QMAC | MMAU__A10 | MMAU__SAT)); in MMAU_d_macs_ll()
1532 *((frac64_t volatile *)(MMAU__QMACD | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_macs_dl()
1533 *((frac32_t volatile *)(MMAU__QMACD | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_macs_dl()
1534 return *((frac64_t volatile *)(MMAU__QMACD | MMAU__A10 | MMAU__SAT)); in MMAU_d_macs_dl()
1577 *((frac64_t volatile *)(MMAU__QMACDA | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_d_macas_dl()
1578 *((frac32_t volatile *)(MMAU__QMACDA | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_d_macas_dl()
1579 return *((frac64_t volatile *)(MMAU__QMACDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_macas_dl()
1618 *((frac32_t volatile *)(MMAU__QMAC | MMAU__X2 | MMAU__SAT)) = lval1; in MMAU_l_macs_ll()
1619 *((frac32_t volatile *)(MMAU__QMAC | MMAU__X3 | MMAU__SAT)) = lval2; in MMAU_l_macs_ll()
1620 return *((frac32_t volatile *)(MMAU__QMAC | MMAU__A1 | MMAU__SAT)); in MMAU_l_macs_ll()
1661 *((frac64_t volatile *)(MMAU__QMACD | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_l_macs_dl()
1662 *((frac32_t volatile *)(MMAU__QMACD | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_l_macs_dl()
1663 return *((frac32_t volatile *)(MMAU__QMACD | MMAU__A1 | MMAU__SAT)); in MMAU_l_macs_dl()
1704 *((frac64_t volatile *)(MMAU__QMACDA | MMAU__X1 | MMAU__SAT)) = dval; in MMAU_l_macas_dl()
1705 *((frac32_t volatile *)(MMAU__QMACDA | MMAU__X3 | MMAU__SAT)) = lval; in MMAU_l_macas_dl()
1706 return *((frac32_t volatile *)(MMAU__QMACDA | MMAU__A1 | MMAU__SAT)); in MMAU_l_macas_dl()
1743 *((frac32_t volatile *)(MMAU__QDIV | MMAU__X2 | MMAU__SAT)) = lnum; in MMAU_l_divs_ll()
1744 *((frac32_t volatile *)(MMAU__QDIV | MMAU__X3 | MMAU__SAT)) = lden; in MMAU_l_divs_ll()
1745 return *((frac32_t volatile *)(MMAU__QDIV | MMAU__A1 | MMAU__SAT)); in MMAU_l_divs_ll()
1763 *((frac32_t volatile *)(MMAU__QDIVDA | MMAU__X3 | MMAU__SAT)) = lden; in MMAU_l_divas_l()
1764 return *((frac32_t volatile *)(MMAU__QDIVDA | MMAU__A1 | MMAU__SAT)); in MMAU_l_divas_l()
1801 *((frac64_t volatile *)(MMAU__QDIVD | MMAU__X1 | MMAU__SAT)) = dnum; in MMAU_d_divs_dl()
1802 *((frac32_t volatile *)(MMAU__QDIVD | MMAU__X3 | MMAU__SAT)) = lden; in MMAU_d_divs_dl()
1803 return *((frac64_t volatile *)(MMAU__QDIVD | MMAU__A10 | MMAU__SAT)); in MMAU_d_divs_dl()
1839 *((frac32_t volatile *)(MMAU__QDIVDA | MMAU__X3 | MMAU__SAT)) = lden1; in MMAU_d_divas_l()
1840 return *((frac64_t volatile *)(MMAU__QDIVDA | MMAU__A10 | MMAU__SAT)); in MMAU_d_divas_l()