Lines Matching refs: vecTmp0 (uses of the temporary vector vecTmp0 in the MVE radix-4 Q15 forward and inverse FFT butterflies; a short scalar sketch follows each group of matches below)
41 q15x8_t vecTmp0, vecTmp1; in _arm_radix4_butterfly_q15_mve() local
103 vecTmp0 = vhaddq(vecSum0, vecSum1); in _arm_radix4_butterfly_q15_mve()
104 vst1q(inA, vecTmp0); in _arm_radix4_butterfly_q15_mve()
109 vecTmp0 = vhsubq(vecSum0, vecSum1); in _arm_radix4_butterfly_q15_mve()
115 vecTmp1 = MVE_CMPLX_MULT_FX_AxB(vecW, vecTmp0, q15x8_t); in _arm_radix4_butterfly_q15_mve()
122 vecTmp0 = MVE_CMPLX_SUB_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_q15_mve()
128 vecTmp1 = MVE_CMPLX_MULT_FX_AxB(vecW, vecTmp0, q15x8_t); in _arm_radix4_butterfly_q15_mve()
135 vecTmp0 = MVE_CMPLX_ADD_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_q15_mve()
141 vecTmp1 = MVE_CMPLX_MULT_FX_AxB(vecW, vecTmp0, q15x8_t); in _arm_radix4_butterfly_q15_mve()
187 vecTmp0 = vhaddq(vecSum0, vecSum1); in _arm_radix4_butterfly_q15_mve()
188 vstrwq_scatter_base_s32(vecScGathAddr, -64, (int32x4_t) vecTmp0); in _arm_radix4_butterfly_q15_mve()
190 vecTmp0 = vhsubq(vecSum0, vecSum1); in _arm_radix4_butterfly_q15_mve()
191 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 4, (int32x4_t) vecTmp0); in _arm_radix4_butterfly_q15_mve()
193 vecTmp0 = MVE_CMPLX_SUB_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_q15_mve()
194 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 8, (int32x4_t) vecTmp0); in _arm_radix4_butterfly_q15_mve()
196 vecTmp0 = MVE_CMPLX_ADD_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_q15_mve()
197 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 12, (int32x4_t) vecTmp0); in _arm_radix4_butterfly_q15_mve()
277 q15x8_t vecTmp0, vecTmp1; in _arm_radix4_butterfly_inverse_q15_mve() local
341 vecTmp0 = vhaddq(vecSum0, vecSum1); in _arm_radix4_butterfly_inverse_q15_mve()
342 vst1q(inA, vecTmp0); in _arm_radix4_butterfly_inverse_q15_mve()
347 vecTmp0 = vhsubq(vecSum0, vecSum1); in _arm_radix4_butterfly_inverse_q15_mve()
353 vecTmp1 = MVE_CMPLX_MULT_FX_AxConjB(vecTmp0, vecW, q15x8_t); in _arm_radix4_butterfly_inverse_q15_mve()
360 vecTmp0 = MVE_CMPLX_ADD_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_inverse_q15_mve()
366 vecTmp1 = MVE_CMPLX_MULT_FX_AxConjB(vecTmp0, vecW, q15x8_t); in _arm_radix4_butterfly_inverse_q15_mve()
372 vecTmp0 = MVE_CMPLX_SUB_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_inverse_q15_mve()
378 vecTmp1 = MVE_CMPLX_MULT_FX_AxConjB(vecTmp0, vecW, q15x8_t); in _arm_radix4_butterfly_inverse_q15_mve()
424 vecTmp0 = vhaddq(vecSum0, vecSum1); in _arm_radix4_butterfly_inverse_q15_mve()
425 vstrwq_scatter_base_s32(vecScGathAddr, -64, (int32x4_t) vecTmp0); in _arm_radix4_butterfly_inverse_q15_mve()
427 vecTmp0 = vhsubq(vecSum0, vecSum1); in _arm_radix4_butterfly_inverse_q15_mve()
428 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 4, (int32x4_t) vecTmp0); in _arm_radix4_butterfly_inverse_q15_mve()
430 vecTmp0 = MVE_CMPLX_ADD_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_inverse_q15_mve()
431 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 8, (int32x4_t) vecTmp0); in _arm_radix4_butterfly_inverse_q15_mve()
433 vecTmp0 = MVE_CMPLX_SUB_FX_A_ixB(vecDiff0, vecDiff1); in _arm_radix4_butterfly_inverse_q15_mve()
434 vstrwq_scatter_base_s32(vecScGathAddr, -64 + 12, (int32x4_t) vecTmp0); in _arm_radix4_butterfly_inverse_q15_mve()
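The inverse-butterfly matches differ from the forward ones in two ways that are visible directly in the listing: the twiddle step multiplies the data by the conjugated twiddle, MVE_CMPLX_MULT_FX_AxConjB(vecTmp0, vecW, q15x8_t) instead of MVE_CMPLX_MULT_FX_AxB(vecW, vecTmp0, q15x8_t), and the A + i*B and A - i*B outputs swap positions (compare lines 193/196 with 430/433). The sketch below continues the scalar model above (same cplx_q15 type and halving helpers); the >> 15 truncation and the simple saturation in the complex multiply are assumptions about the fixed-point macros, not something the matched lines show, and radix4_inv_ref is again an illustrative name.

/* Saturate a widened Q15 result back to int16_t. */
static int16_t sat_q15(int64_t v) { return (int16_t)(v > 32767 ? 32767 : (v < -32768 ? -32768 : v)); }

/* a * b in Q15, roughly modelling MVE_CMPLX_MULT_FX_AxB(vecW, vecTmp0, q15x8_t)
   in the forward twiddle step (lines 115, 128, 141). */
static cplx_q15 cmul_q15(cplx_q15 a, cplx_q15 b)
{
    cplx_q15 r;
    r.re = sat_q15(((int64_t)a.re * b.re - (int64_t)a.im * b.im) >> 15);
    r.im = sat_q15(((int64_t)a.re * b.im + (int64_t)a.im * b.re) >> 15);
    return r;
}

/* x * conj(w) in Q15, roughly modelling MVE_CMPLX_MULT_FX_AxConjB(vecTmp0, vecW, q15x8_t)
   in the inverse twiddle step (lines 353, 366, 378). */
static cplx_q15 cmul_conj_q15(cplx_q15 x, cplx_q15 w)
{
    cplx_q15 r;
    r.re = sat_q15(((int64_t)x.re * w.re + (int64_t)x.im * w.im) >> 15);
    r.im = sat_q15(((int64_t)x.im * w.re - (int64_t)x.re * w.im) >> 15);
    return r;
}

/* Inverse last-stage butterfly, mirroring the scatter stores of lines 424..434:
   relative to radix4_fwd_ref, the A + i*B and A - i*B outputs swap places. */
static void radix4_inv_ref(const cplx_q15 x[4], cplx_q15 y[4])
{
    cplx_q15 sum0  = chadd(x[0], x[2]);    /* assumed halved sums/differences, as before */
    cplx_q15 sum1  = chadd(x[1], x[3]);
    cplx_q15 diff0 = chsub(x[0], x[2]);
    cplx_q15 diff1 = chsub(x[1], x[3]);

    y[0] = chadd(sum0, sum1);              /* line 424: vhaddq(vecSum0, vecSum1)  */
    y[1] = chsub(sum0, sum1);              /* line 427: vhsubq(vecSum0, vecSum1)  */
    y[2] = c_add_iB(diff0, diff1);         /* line 430: MVE_CMPLX_ADD_FX_A_ixB    */
    y[3] = c_sub_iB(diff0, diff1);         /* line 433: MVE_CMPLX_SUB_FX_A_ixB    */
}

Which twiddle exponent pairs with which output is not visible in the matched lines (the vecW loads do not reference vecTmp0), so the two multiply models above are shown on their own rather than wired into a full main-stage sketch.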