Lines Matching refs:vecTmp0
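The references fall into two kernels: the forward transform _arm_radix4_butterfly_q31_mve (source lines 43-208) and its inverse counterpart _arm_radix4_butterfly_inverse_q31_mve (source lines 295-458). In both, vecTmp0 holds an intermediate radix-4 combine of vecSum0/vecSum1 or vecDiff0/vecDiff1; it is then either stored directly (via vst1q, or via vstrwq_scatter_base_s32 later in each function) or multiplied with the twiddle vector vecW into vecTmp1 before the store.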

43 q31x4_t vecTmp0, vecTmp1; in _arm_radix4_butterfly_q31_mve() local
107 vecTmp0 = vhaddq(vecSum0, vecSum1); in _arm_radix4_butterfly_q31_mve()
108 vst1q(inA, vecTmp0); in _arm_radix4_butterfly_q31_mve()
113 vecTmp0 = vhsubq(vecSum0, vecSum1); in _arm_radix4_butterfly_q31_mve()
119 vecTmp1 = MVE_CMPLX_MULT_FX_AxB(vecW, vecTmp0, q31x4_t); in _arm_radix4_butterfly_q31_mve()
126 vecTmp0 = MVE_CMPLX_SUB_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_q31_mve()
132 vecTmp1 = MVE_CMPLX_MULT_FX_AxB(vecW, vecTmp0, q31x4_t); in _arm_radix4_butterfly_q31_mve()
138 vecTmp0 = MVE_CMPLX_ADD_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_q31_mve()
144 vecTmp1 = MVE_CMPLX_MULT_FX_AxB(vecW, vecTmp0, q31x4_t); in _arm_radix4_butterfly_q31_mve()
198 vecTmp0 = vhaddq(vecSum0, vecSum1); in _arm_radix4_butterfly_q31_mve()
199 vstrwq_scatter_base_s32(vecScGathAddr, -64, vecTmp0); in _arm_radix4_butterfly_q31_mve()
201 vecTmp0 = vhsubq(vecSum0, vecSum1); in _arm_radix4_butterfly_q31_mve()
202 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 8, vecTmp0); in _arm_radix4_butterfly_q31_mve()
204 vecTmp0 = MVE_CMPLX_SUB_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_q31_mve()
205 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 16, vecTmp0); in _arm_radix4_butterfly_q31_mve()
207 vecTmp0 = MVE_CMPLX_ADD_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_q31_mve()
208 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 24, vecTmp0); in _arm_radix4_butterfly_q31_mve()
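The forward-kernel references trace one radix-4 decimation-in-frequency combine per group of four complex inputs. The scalar model below is a minimal sketch of what each vecTmp0 assignment computes, not a copy of the CMSIS-DSP source: it assumes vecSum0/vecSum1 and vecDiff0/vecDiff1 are halving sums and differences of the four inputs (their computation is not part of this listing), that MVE_CMPLX_SUB_FX_A_ixB and MVE_CMPLX_ADD_FX_A_ixB form a - i*b and a + i*b, and that w1/w2/w3 stand for the three vecW values used at source lines 119, 132 and 144. The q31 fixed-point scaling is only indicated by the /2 halvings, and radix4_fwd_combine is a hypothetical helper name.

#include <complex.h>

typedef double complex cplx;   /* scalar stand-in for one complex lane of a q31x4_t */

/* One forward radix-4 combine, mirroring source lines 107-144 (hypothetical helper). */
static void radix4_fwd_combine(cplx a, cplx b, cplx c, cplx d,
                               cplx w1, cplx w2, cplx w3,
                               cplx out[4])
{
    cplx sum0  = (a + c) / 2.0;           /* assumed origin of vecSum0            */
    cplx sum1  = (b + d) / 2.0;           /* assumed origin of vecSum1            */
    cplx diff0 = (a - c) / 2.0;           /* assumed origin of vecDiff0           */
    cplx diff1 = (b - d) / 2.0;           /* assumed origin of vecDiff1           */

    out[0] = (sum0 + sum1) / 2.0;         /* 107-108: vhaddq, stored as-is        */
    out[1] = w1 * ((sum0 - sum1) / 2.0);  /* 113, 119: vhsubq, then twiddle mult  */
    out[2] = w2 * (diff0 - I * diff1);    /* 126, 132: A - i.B, then twiddle mult */
    out[3] = w3 * (diff0 + I * diff1);    /* 138, 144: A + i.B, then twiddle mult */
}

In the block at source lines 198-208 the same four combines are written back through vstrwq_scatter_base_s32 at the four offsets shown, with no twiddle multiply applied to the stored values.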
295 q31x4_t vecTmp0, vecTmp1; in _arm_radix4_butterfly_inverse_q31_mve() local
357 vecTmp0 = vhaddq(vecSum0, vecSum1); in _arm_radix4_butterfly_inverse_q31_mve()
358 vst1q(inA, vecTmp0); in _arm_radix4_butterfly_inverse_q31_mve()
363 vecTmp0 = vhsubq(vecSum0, vecSum1); in _arm_radix4_butterfly_inverse_q31_mve()
369 vecTmp1 = MVE_CMPLX_MULT_FX_AxConjB(vecTmp0, vecW, q31x4_t); in _arm_radix4_butterfly_inverse_q31_mve()
376 vecTmp0 = MVE_CMPLX_ADD_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_inverse_q31_mve()
382 vecTmp1 = MVE_CMPLX_MULT_FX_AxConjB(vecTmp0, vecW, q31x4_t); in _arm_radix4_butterfly_inverse_q31_mve()
388 vecTmp0 = MVE_CMPLX_SUB_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_inverse_q31_mve()
394 vecTmp1 = MVE_CMPLX_MULT_FX_AxConjB(vecTmp0, vecW, q31x4_t); in _arm_radix4_butterfly_inverse_q31_mve()
448 vecTmp0 = vhaddq(vecSum0, vecSum1); in _arm_radix4_butterfly_inverse_q31_mve()
449 vstrwq_scatter_base_s32(vecScGathAddr, -64, vecTmp0); in _arm_radix4_butterfly_inverse_q31_mve()
451 vecTmp0 = vhsubq(vecSum0, vecSum1); in _arm_radix4_butterfly_inverse_q31_mve()
452 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 8, vecTmp0); in _arm_radix4_butterfly_inverse_q31_mve()
454 vecTmp0 = MVE_CMPLX_ADD_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_inverse_q31_mve()
455 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 16, vecTmp0); in _arm_radix4_butterfly_inverse_q31_mve()
457 vecTmp0 = MVE_CMPLX_SUB_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_inverse_q31_mve()
458 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 24, vecTmp0); in _arm_radix4_butterfly_inverse_q31_mve()
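The inverse kernel repeats the same structure with two differences visible in the listing: the twiddle product uses MVE_CMPLX_MULT_FX_AxConjB, i.e. a multiply by the conjugated vecW, and the A + i*B / A - i*B combines swap places (lines 376/388 here versus 126/138 in the forward path). Below is a scalar sketch under the same assumptions as above, continuing the previous snippet (it reuses cplx and <complex.h>); conj() stands in for the conjugating macro and radix4_inv_combine is again a hypothetical name.

/* One inverse radix-4 combine, mirroring source lines 357-394 (hypothetical helper). */
static void radix4_inv_combine(cplx a, cplx b, cplx c, cplx d,
                               cplx w1, cplx w2, cplx w3,
                               cplx out[4])
{
    cplx sum0  = (a + c) / 2.0;
    cplx sum1  = (b + d) / 2.0;
    cplx diff0 = (a - c) / 2.0;
    cplx diff1 = (b - d) / 2.0;

    out[0] = (sum0 + sum1) / 2.0;              /* 357-358: vhaddq, stored as-is   */
    out[1] = ((sum0 - sum1) / 2.0) * conj(w1); /* 363, 369: vhsubq, then AxConjB  */
    out[2] = (diff0 + I * diff1) * conj(w2);   /* 376, 382: A + i.B, then AxConjB */
    out[3] = (diff0 - I * diff1) * conj(w3);   /* 388, 394: A - i.B, then AxConjB */
}

As in the forward path, the final block (source lines 448-458) scatter-stores the combines directly, with the A + i*B result at offset -64 + 16 and the A - i*B result at -64 + 24.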