Lines Matching refs:vecA

83 f32x4_t vecB, vecA; in arm_mat_cmplx_mult_f32_2x2_mve() local
95 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_2x2_mve()
96 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_2x2_mve()
97 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_2x2_mve()
99 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_2x2_mve()
100 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_2x2_mve()
101 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_2x2_mve()
116 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_2x2_mve()
117 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_2x2_mve()
118 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_2x2_mve()
120 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_2x2_mve()
121 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_2x2_mve()
122 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_2x2_mve()
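
The 2x2 references above show the core Helium (MVE) complex-multiply idiom: vcmulq forms the partial products involving the real components of vecA, and vcmlaq_rot90 adds the cross terms from the imaginary components, so the pair of intrinsics yields a full complex product for each (re, im) lane pair. A minimal sketch of that idiom, assuming arm_mve.h on an MVE-enabled target (the helper name and buffer arguments are illustrative, not from the source file):

    #include <arm_mve.h>

    typedef float32x4_t f32x4_t;  /* CMSIS-DSP alias used in the listing */

    /* Complex product of two interleaved (re, im, re, im) f32 pairs:
       acc = a * b, built from the VCMUL/VCMLA rotation pair. */
    static f32x4_t cmplx_mul_f32x4(const float32_t *pA, const float32_t *pB)
    {
        f32x4_t vecA = vldrwq_f32(pA);        /* {a0.re, a0.im, a1.re, a1.im} */
        f32x4_t vecB = vldrwq_f32(pB);        /* {b0.re, b0.im, b1.re, b1.im} */

        f32x4_t acc = vcmulq(vecA, vecB);     /* real-component partial products */
        acc = vcmlaq_rot90(acc, vecA, vecB);  /* imaginary-component cross terms */
        return acc;                           /* {(a0*b0).re, (a0*b0).im, (a1*b1).re, (a1*b1).im} */
    }
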
148 f32x4_t vecB, vecA; in arm_mat_cmplx_mult_f32_3x3_mve() local
166 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_3x3_mve()
167 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
168 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
170 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_3x3_mve()
171 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
172 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
174 vecA = vldrwq_f32(pInA2); in arm_mat_cmplx_mult_f32_3x3_mve()
175 acc2 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
176 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
181 vecA = vldrwq_f32(&pInA0[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
182 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
183 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
185 vecA = vldrwq_f32(&pInA1[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
186 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
187 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
189 vecA = vldrwq_f32(&pInA2[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
190 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
191 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
209 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_3x3_mve()
210 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
211 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
213 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_3x3_mve()
214 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
215 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
217 vecA = vldrwq_f32(pInA2); in arm_mat_cmplx_mult_f32_3x3_mve()
218 acc2 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
219 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
223 vecA = vldrwq_f32(&pInA0[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
224 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
225 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
227 vecA = vldrwq_f32(&pInA1[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
228 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
229 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
231 vecA = vldrwq_f32(&pInA2[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
232 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
233 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
251 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_3x3_mve()
252 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
253 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
255 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_3x3_mve()
256 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
257 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
259 vecA = vldrwq_f32(pInA2); in arm_mat_cmplx_mult_f32_3x3_mve()
260 acc2 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
261 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
265 vecA = vldrwq_f32(&pInA0[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
266 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
267 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
269 vecA = vldrwq_f32(&pInA1[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
270 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
271 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
273 vecA = vldrwq_f32(&pInA2[4]); in arm_mat_cmplx_mult_f32_3x3_mve()
274 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
275 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_3x3_mve()
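
The 3x3 references split each dot product across two 4-float chunks: vcmulq starts the accumulator on the first chunk, and vcmlaq (rotation 0) plus vcmlaq_rot90 fold in every following chunk at &pInAx[4], accumulating rather than overwriting. A hedged sketch of that shape, assuming contiguous A and B operands for simplicity (the real kernel addresses B by column, and the chunk count is a parameter introduced here for illustration):

    #include <arm_mve.h>

    typedef float32x4_t f32x4_t;  /* CMSIS-DSP alias used in the listing */

    /* Complex dot product accumulated over numChunks chunks of two
       complex values each: chunk 0 uses vcmulq, later chunks vcmlaq. */
    static f32x4_t cmplx_dot_chunks(const float32_t *pA, const float32_t *pB,
                                    uint32_t numChunks)
    {
        f32x4_t vecA = vldrwq_f32(pA);
        f32x4_t vecB = vldrwq_f32(pB);
        f32x4_t acc  = vcmulq(vecA, vecB);
        acc = vcmlaq_rot90(acc, vecA, vecB);

        for (uint32_t i = 1; i < numChunks; i++) {
            vecA = vldrwq_f32(&pA[4 * i]);
            vecB = vldrwq_f32(&pB[4 * i]);
            acc  = vcmlaq(acc, vecA, vecB);        /* accumulate real-part products */
            acc  = vcmlaq_rot90(acc, vecA, vecB);  /* accumulate imaginary cross terms */
        }
        return acc;  /* two interleaved partial complex sums */
    }
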
306 f32x4_t vecB, vecA; in arm_mat_cmplx_mult_f32_4x4_mve() local
322 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_4x4_mve()
323 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
324 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
326 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_4x4_mve()
327 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
328 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
330 vecA = vldrwq_f32(pInA2); in arm_mat_cmplx_mult_f32_4x4_mve()
331 acc2 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
332 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
334 vecA = vldrwq_f32(pInA3); in arm_mat_cmplx_mult_f32_4x4_mve()
335 acc3 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
336 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
340 vecA = vldrwq_f32(&pInA0[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
341 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
342 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
344 vecA = vldrwq_f32(&pInA1[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
345 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
346 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
348 vecA = vldrwq_f32(&pInA2[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
349 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
350 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
352 vecA = vldrwq_f32(&pInA3[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
353 acc3 = vcmlaq(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
354 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
373 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_4x4_mve()
374 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
375 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
377 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_4x4_mve()
378 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
379 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
381 vecA = vldrwq_f32(pInA2); in arm_mat_cmplx_mult_f32_4x4_mve()
382 acc2 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
383 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
385 vecA = vldrwq_f32(pInA3); in arm_mat_cmplx_mult_f32_4x4_mve()
386 acc3 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
387 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
391 vecA = vldrwq_f32(&pInA0[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
392 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
393 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
395 vecA = vldrwq_f32(&pInA1[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
396 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
397 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
399 vecA = vldrwq_f32(&pInA2[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
400 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
401 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
403 vecA = vldrwq_f32(&pInA3[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
404 acc3 = vcmlaq(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
405 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
424 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_4x4_mve()
425 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
426 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
428 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_4x4_mve()
429 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
430 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
432 vecA = vldrwq_f32(pInA2); in arm_mat_cmplx_mult_f32_4x4_mve()
433 acc2 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
434 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
436 vecA = vldrwq_f32(pInA3); in arm_mat_cmplx_mult_f32_4x4_mve()
437 acc3 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
438 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
442 vecA = vldrwq_f32(&pInA0[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
443 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
444 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
446 vecA = vldrwq_f32(&pInA1[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
447 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
448 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
450 vecA = vldrwq_f32(&pInA2[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
451 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
452 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
454 vecA = vldrwq_f32(&pInA3[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
455 acc3 = vcmlaq(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
456 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
475 vecA = vldrwq_f32(pInA0); in arm_mat_cmplx_mult_f32_4x4_mve()
476 acc0 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
477 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
479 vecA = vldrwq_f32(pInA1); in arm_mat_cmplx_mult_f32_4x4_mve()
480 acc1 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
481 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
483 vecA = vldrwq_f32(pInA2); in arm_mat_cmplx_mult_f32_4x4_mve()
484 acc2 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
485 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
487 vecA = vldrwq_f32(pInA3); in arm_mat_cmplx_mult_f32_4x4_mve()
488 acc3 = vcmulq(vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
489 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
493 vecA = vldrwq_f32(&pInA0[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
494 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
495 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
497 vecA = vldrwq_f32(&pInA1[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
498 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
499 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
501 vecA = vldrwq_f32(&pInA2[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
502 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
503 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
505 vecA = vldrwq_f32(&pInA3[4]); in arm_mat_cmplx_mult_f32_4x4_mve()
506 acc3 = vcmlaq(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
507 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32_4x4_mve()
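
Across the 4x4 references, one vecB value feeds four accumulators (acc0-acc3), one per row of A, so each load of a B-column chunk is amortised over four rows; only vecA is reloaded per row. This mirrors lines 322-354 directly; a sketch of that row blocking (the helper and its pointer-out accumulators are illustrative):

    #include <arm_mve.h>

    typedef float32x4_t f32x4_t;  /* CMSIS-DSP alias used in the listing */

    /* One B-column chunk reused across four A rows: vecB is loaded once
       by the caller, then the VCMUL/VCMLA pair runs per row accumulator. */
    static void cmplx_mul_4rows(const float32_t *pInA0, const float32_t *pInA1,
                                const float32_t *pInA2, const float32_t *pInA3,
                                f32x4_t vecB,
                                f32x4_t *acc0, f32x4_t *acc1,
                                f32x4_t *acc2, f32x4_t *acc3)
    {
        f32x4_t vecA;

        vecA  = vldrwq_f32(pInA0);
        *acc0 = vcmulq(vecA, vecB);
        *acc0 = vcmlaq_rot90(*acc0, vecA, vecB);

        vecA  = vldrwq_f32(pInA1);
        *acc1 = vcmulq(vecA, vecB);
        *acc1 = vcmlaq_rot90(*acc1, vecA, vecB);

        vecA  = vldrwq_f32(pInA2);
        *acc2 = vcmulq(vecA, vecB);
        *acc2 = vcmlaq_rot90(*acc2, vecA, vecB);

        vecA  = vldrwq_f32(pInA3);
        *acc3 = vcmulq(vecA, vecB);
        *acc3 = vcmlaq_rot90(*acc3, vecA, vecB);
    }
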
640 f32x4_t vecB, vecA; in arm_mat_cmplx_mult_f32() local
648 vecA = vld1q(pSrcA0Vec); pSrcA0Vec += 4; in arm_mat_cmplx_mult_f32()
649 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32()
650 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32()
651 vecA = vld1q(pSrcA1Vec); pSrcA1Vec += 4; in arm_mat_cmplx_mult_f32()
652 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32()
653 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32()
654 vecA = vld1q(pSrcA2Vec); pSrcA2Vec += 4; in arm_mat_cmplx_mult_f32()
655 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32()
656 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32()
657 vecA = vld1q(pSrcA3Vec); pSrcA3Vec += 4; in arm_mat_cmplx_mult_f32()
658 acc3 = vcmlaq(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32()
659 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32()
673 f32x4_t vecB, vecA; in arm_mat_cmplx_mult_f32() local
681 vecA = vld1q(pSrcA0Vec); in arm_mat_cmplx_mult_f32()
682 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32()
683 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32()
684 vecA = vld1q(pSrcA1Vec); in arm_mat_cmplx_mult_f32()
685 acc1 = vcmlaq(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32()
686 acc1 = vcmlaq_rot90(acc1, vecA, vecB); in arm_mat_cmplx_mult_f32()
687 vecA = vld1q(pSrcA2Vec); in arm_mat_cmplx_mult_f32()
688 acc2 = vcmlaq(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32()
689 acc2 = vcmlaq_rot90(acc2, vecA, vecB); in arm_mat_cmplx_mult_f32()
690 vecA = vld1q(pSrcA3Vec); in arm_mat_cmplx_mult_f32()
691 acc3 = vcmlaq(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32()
692 acc3 = vcmlaq_rot90(acc3, vecA, vecB); in arm_mat_cmplx_mult_f32()
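
The generic-size kernel (from line 640) differs in two ways: it loads with the overloaded vld1q and post-increments the row pointers by 4 floats (two complex values) per step, and it uses vcmlaq from the very first chunk, which only works if the accumulators start at zero. A sketch of one row's inner loop under that assumption (the zero init via vdupq_n_f32 is an inference, not visible in the matching lines, and B is treated as contiguous for simplicity):

    #include <arm_mve.h>

    typedef float32x4_t f32x4_t;  /* CMSIS-DSP alias used in the listing */

    /* Generic-size inner loop for one row: pointers advance by 4 floats
       per iteration, and the accumulator starts at zero so vcmlaq can be
       used from the first chunk onward. */
    static f32x4_t cmplx_row_dot(const float32_t *pSrcA0Vec,
                                 const float32_t *pSrcBVec,
                                 uint32_t blkCnt)
    {
        f32x4_t acc0 = vdupq_n_f32(0.0f);  /* assumed zero init */

        while (blkCnt > 0U) {
            f32x4_t vecB = vld1q(pSrcBVec);  pSrcBVec  += 4;
            f32x4_t vecA = vld1q(pSrcA0Vec); pSrcA0Vec += 4;
            acc0 = vcmlaq(acc0, vecA, vecB);
            acc0 = vcmlaq_rot90(acc0, vecA, vecB);
            blkCnt--;
        }
        return acc0;
    }
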
771 f32x4_t vecB, vecA; in arm_mat_cmplx_mult_f32() local
779 vecA = vld1q(pSrcA0Vec); in arm_mat_cmplx_mult_f32()
781 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32()
782 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32()
796 f32x4_t vecB, vecA; in arm_mat_cmplx_mult_f32() local
800 vecA = vld1q(pSrcA0Vec); in arm_mat_cmplx_mult_f32()
801 acc0 = vcmlaq(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32()
802 acc0 = vcmlaq_rot90(acc0, vecA, vecB); in arm_mat_cmplx_mult_f32()
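
After any of these loops, each f32x4_t accumulator holds two interleaved partial complex sums (lanes 0-1 and lanes 2-3) that still have to be folded into one complex result. A plausible reduction, assuming that interleaved layout (the actual file's store sequence may differ in detail):

    #include <arm_mve.h>

    typedef float32x4_t f32x4_t;  /* CMSIS-DSP alias used in the listing */

    /* Fold lanes {0,1} and {2,3} -- two partial complex sums -- into
       one complex value and store it as (re, im). */
    static void cmplx_store_result(float32_t *pDst, f32x4_t acc)
    {
        pDst[0] = vgetq_lane_f32(acc, 0) + vgetq_lane_f32(acc, 2);  /* real */
        pDst[1] = vgetq_lane_f32(acc, 1) + vgetq_lane_f32(acc, 3);  /* imag */
    }
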