Lines matching refs:vecA in arm_mat_cmplx_mult_f16.c
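All of the matches fall in the Helium (MVE) paths of the CMSIS-DSP complex matrix multiply: the 4x4 specialization arm_mat_cmplx_mult_f16_4x4_mve() and the general arm_mat_cmplx_mult_f16(). In every group, vecA holds eight float16 lanes, i.e. four interleaved (re, im) pairs, loaded from one row of matrix A and multiplied with vecB through the VCMUL/VCMLA complex intrinsics.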
in arm_mat_cmplx_mult_f16_4x4_mve():

  251  f16x8_t vecB, vecA;                      (local declaration)

  264  vecA = vldrhq_f16(pInA0);
  265  acc0 = vcmulq(vecA, vecB);
  266  acc0 = vcmlaq_rot90(acc0, vecA, vecB);

  268  vecA = vldrhq_f16(pInA1);
  269  acc1 = vcmulq(vecA, vecB);
  270  acc1 = vcmlaq_rot90(acc1, vecA, vecB);

  272  vecA = vldrhq_f16(pInA2);
  273  acc2 = vcmulq(vecA, vecB);
  274  acc2 = vcmlaq_rot90(acc2, vecA, vecB);

  276  vecA = vldrhq_f16(pInA3);
  277  acc3 = vcmulq(vecA, vecB);
  278  acc3 = vcmlaq_rot90(acc3, vecA, vecB);

  293  vecA = vldrhq_f16(pInA0);
  294  acc0 = vcmulq(vecA, vecB);
  295  acc0 = vcmlaq_rot90(acc0, vecA, vecB);

  297  vecA = vldrhq_f16(pInA1);
  298  acc1 = vcmulq(vecA, vecB);
  299  acc1 = vcmlaq_rot90(acc1, vecA, vecB);

  301  vecA = vldrhq_f16(pInA2);
  302  acc2 = vcmulq(vecA, vecB);
  303  acc2 = vcmlaq_rot90(acc2, vecA, vecB);

  305  vecA = vldrhq_f16(pInA3);
  306  acc3 = vcmulq(vecA, vecB);
  307  acc3 = vcmlaq_rot90(acc3, vecA, vecB);

  322  vecA = vldrhq_f16(pInA0);
  323  acc0 = vcmulq(vecA, vecB);
  324  acc0 = vcmlaq_rot90(acc0, vecA, vecB);

  326  vecA = vldrhq_f16(pInA1);
  327  acc1 = vcmulq(vecA, vecB);
  328  acc1 = vcmlaq_rot90(acc1, vecA, vecB);

  330  vecA = vldrhq_f16(pInA2);
  331  acc2 = vcmulq(vecA, vecB);
  332  acc2 = vcmlaq_rot90(acc2, vecA, vecB);

  334  vecA = vldrhq_f16(pInA3);
  335  acc3 = vcmulq(vecA, vecB);
  336  acc3 = vcmlaq_rot90(acc3, vecA, vecB);

  351  vecA = vldrhq_f16(pInA0);
  352  acc0 = vcmulq(vecA, vecB);
  353  acc0 = vcmlaq_rot90(acc0, vecA, vecB);

  355  vecA = vldrhq_f16(pInA1);
  356  acc1 = vcmulq(vecA, vecB);
  357  acc1 = vcmlaq_rot90(acc1, vecA, vecB);

  359  vecA = vldrhq_f16(pInA2);
  360  acc2 = vcmulq(vecA, vecB);
  361  acc2 = vcmlaq_rot90(acc2, vecA, vecB);

  363  vecA = vldrhq_f16(pInA3);
  364  acc3 = vcmulq(vecA, vecB);
  365  acc3 = vcmlaq_rot90(acc3, vecA, vecB);
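The vcmulq/vcmlaq_rot90 pair repeated throughout the 4x4 kernel is the usual MVE idiom for a full complex product: the rotation-0 multiply contributes (a.re*b.re, a.re*b.im) for each pair, and the rotation-90 accumulate adds (-a.im*b.im, a.im*b.re). A minimal sketch of the idiom follows; the helper name is hypothetical and the typedef stands in for the CMSIS-DSP f16x8_t alias:

#include <arm_mve.h>                 /* MVE/Helium intrinsics; needs an MVE-FP target */

typedef float16x8_t f16x8_t;         /* stand-in for the CMSIS-DSP type alias */

/* Hypothetical helper: pairwise complex product of two vectors of
   interleaved (re, im) float16 pairs, as computed at lines 265-266
   and their unrolled repeats. */
static inline f16x8_t cmplx_mul_f16(f16x8_t vecA, f16x8_t vecB)
{
    f16x8_t acc = vcmulq(vecA, vecB);        /* re  = a.re*b.re, im  = a.re*b.im */
    acc = vcmlaq_rot90(acc, vecA, vecB);     /* re -= a.im*b.im, im += a.im*b.re */
    return acc;                              /* acc = a*b for each complex pair  */
}

Because acc0..acc3 each start from a fresh vcmulq, the 4x4 kernel needs no separate accumulator initialization, and a single vecB is reused against all four row loads pInA0..pInA3.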
in arm_mat_cmplx_mult_f16():

  503  f16x8_t vecB, vecA;                      (local declaration)

  511  vecA = vld1q(pSrcA0Vec); pSrcA0Vec += 8;
  512  acc0 = vcmlaq(acc0, vecA, vecB);
  513  acc0 = vcmlaq_rot90(acc0, vecA, vecB);

  515  vecA = vld1q(pSrcA1Vec); pSrcA1Vec += 8;
  516  acc1 = vcmlaq(acc1, vecA, vecB);
  517  acc1 = vcmlaq_rot90(acc1, vecA, vecB);

  519  vecA = vld1q(pSrcA2Vec); pSrcA2Vec += 8;
  520  acc2 = vcmlaq(acc2, vecA, vecB);
  521  acc2 = vcmlaq_rot90(acc2, vecA, vecB);

  523  vecA = vld1q(pSrcA3Vec); pSrcA3Vec += 8;
  524  acc3 = vcmlaq(acc3, vecA, vecB);
  525  acc3 = vcmlaq_rot90(acc3, vecA, vecB);
  540  f16x8_t vecB, vecA;                      (local declaration)

  548  vecA = vld1q(pSrcA0Vec);
  549  acc0 = vcmlaq(acc0, vecA, vecB);
  550  acc0 = vcmlaq_rot90(acc0, vecA, vecB);

  552  vecA = vld1q(pSrcA1Vec);
  553  acc1 = vcmlaq(acc1, vecA, vecB);
  554  acc1 = vcmlaq_rot90(acc1, vecA, vecB);

  556  vecA = vld1q(pSrcA2Vec);
  557  acc2 = vcmlaq(acc2, vecA, vecB);
  558  acc2 = vcmlaq_rot90(acc2, vecA, vecB);

  560  vecA = vld1q(pSrcA3Vec);
  561  acc3 = vcmlaq(acc3, vecA, vecB);
  562  acc3 = vcmlaq_rot90(acc3, vecA, vecB);
  639  f16x8_t vecB, vecA;                      (local declaration)

  647  vecA = vld1q(pSrcA0Vec);
  649  acc0 = vcmlaq(acc0, vecA, vecB);
  650  acc0 = vcmlaq_rot90(acc0, vecA, vecB);

  664  f16x8_t vecB, vecA;                      (local declaration)

  668  vecA = vld1q(pSrcA0Vec);
  669  acc0 = vcmlaq(acc0, vecA, vecB);
  670  acc0 = vcmlaq_rot90(acc0, vecA, vecB);
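In the general arm_mat_cmplx_mult_f16() kernel the first step is vcmlaq rather than vcmulq, so each iteration folds into a running accumulator: acc += vecA * vecB per complex pair, with vecA re-loaded from a row pointer each time. A hedged sketch of one inner-loop step under those assumptions; the helper and parameter names are illustrative, not CMSIS-DSP source:

#include <arm_mve.h>

typedef float16x8_t f16x8_t;         /* stand-in for the CMSIS-DSP type alias */

/* Illustrative inner-loop step mirroring lines 511-513: load four complex
   f16 values from one row of A and fold them into the running accumulator,
   acc += vecA * vecB (pairwise complex multiply-accumulate). */
static inline f16x8_t cmplx_mac_f16(f16x8_t acc, const float16_t *pSrcARow,
                                    f16x8_t vecB)
{
    f16x8_t vecA = vld1q(pSrcARow);          /* 8 f16 lanes = 4 (re, im) pairs   */
    acc = vcmlaq(acc, vecA, vecB);           /* re += a.re*b.re, im += a.re*b.im */
    acc = vcmlaq_rot90(acc, vecA, vecB);     /* re -= a.im*b.im, im += a.im*b.re */
    return acc;
}

The pSrcAnVec += 8 bumps in the first group advance by eight f16 elements, i.e. four complex values, per iteration; the later groups (lines 548-670) repeat the same MAC idiom in what appear to be tail and single-row paths, where only the pointer bookkeeping differs.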