
Searched refs:vecSrc (Results 1 – 25 of 77) sorted by relevance


/cmsis-dsp-3.5.0/Source/ComplexMathFunctions/
arm_cmplx_mag_f16.c
63 f16x8x2_t vecSrc; in arm_cmplx_mag_f16() local
73 vecSrc = vld2q(pSrc); in arm_cmplx_mag_f16()
75 sum = vmulq(vecSrc.val[0], vecSrc.val[0]); in arm_cmplx_mag_f16()
76 sum = vfmaq(sum, vecSrc.val[1], vecSrc.val[1]); in arm_cmplx_mag_f16()
120 vecSrc = vld2q((float16_t const *)pSrc); in arm_cmplx_mag_f16()
121 sum = vmulq(vecSrc.val[0], vecSrc.val[0]); in arm_cmplx_mag_f16()
122 sum = vfmaq(sum, vecSrc.val[1], vecSrc.val[1]); in arm_cmplx_mag_f16()
arm_cmplx_mag_q15.c
67 q15x8x2_t vecSrc; in arm_cmplx_mag_q15() local
82 vecSrc = vld2q(pSrc); in arm_cmplx_mag_q15()
88 prod0 = vmullbq_int_s16(vecSrc.val[0], vecSrc.val[0]); in arm_cmplx_mag_q15()
91 prod0 = vmullbq_int_s16(vecSrc.val[1], vecSrc.val[1]); in arm_cmplx_mag_q15()
95 prod1 = vmulltq_int_s16(vecSrc.val[0], vecSrc.val[0]); in arm_cmplx_mag_q15()
98 prod1 = vmulltq_int_s16(vecSrc.val[1], vecSrc.val[1]); in arm_cmplx_mag_q15()
arm_cmplx_mag_squared_f16.c
59 f16x8x2_t vecSrc; in arm_cmplx_mag_squared_f16() local
66 vecSrc = vld2q(pSrc); in arm_cmplx_mag_squared_f16()
67 sum = vmulq_m(vuninitializedq_f16(),vecSrc.val[0], vecSrc.val[0],p); in arm_cmplx_mag_squared_f16()
68 sum = vfmaq_m(sum, vecSrc.val[1], vecSrc.val[1],p); in arm_cmplx_mag_squared_f16()
arm_cmplx_mag_q31.c
64 q31x4x2_t vecSrc; in arm_cmplx_mag_q31() local
74 vecSrc = vld2q(pSrc); in arm_cmplx_mag_q31()
76 sum = vqaddq(vmulhq(vecSrc.val[0], vecSrc.val[0]), in arm_cmplx_mag_q31()
77 vmulhq(vecSrc.val[1], vecSrc.val[1])); in arm_cmplx_mag_q31()
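All four complex-magnitude results follow the same Helium (MVE) idiom: vld2q de-interleaves the complex input so that vecSrc.val[0] holds the real parts and vecSrc.val[1] the imaginary parts, and the squared magnitude is built with a multiply followed by a fused multiply-accumulate. Below is a minimal f32 sketch of that de-interleave-and-square step, not the CMSIS-DSP source; the function name and loop structure are illustrative, and the magnitude (rather than magnitude-squared) kernels additionally apply a square root that is omitted here.

#include <arm_mve.h>

/* Sketch: squared magnitude of 4 complex f32 samples per iteration. */
void cmplx_mag_squared_f32_sketch(const float32_t *pSrc, float32_t *pDst, uint32_t numSamples)
{
    while (numSamples >= 4U)
    {
        float32x4x2_t vecSrc = vld2q(pSrc);                         /* val[0] = re, val[1] = im */
        float32x4_t   sum    = vmulq(vecSrc.val[0], vecSrc.val[0]); /* re * re                  */
        sum = vfmaq(sum, vecSrc.val[1], vecSrc.val[1]);             /* + im * im                */
        vst1q(pDst, sum);

        pSrc += 8;                     /* 4 complex samples = 8 floats */
        pDst += 4;
        numSamples -= 4U;
    }
    /* A tail-predicated pass (vctp32q + predicated ops) would handle numSamples % 4. */
}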
/cmsis-dsp-3.5.0/Source/StatisticsFunctions/
arm_absmin_f32.c
72 f32x4_t vecSrc; in arm_absmin_f32() local
89 vecSrc = vldrwq_f32(pSrcVec); in arm_absmin_f32()
91 vecSrc = vabsq(vecSrc); in arm_absmin_f32()
96 p0 = vcmpleq(vecSrc, curExtremValVec); in arm_absmin_f32()
97 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmin_f32()
115 vecSrc = vldrwq_f32(pSrcVec); in arm_absmin_f32()
117 vecSrc = vabsq(vecSrc); in arm_absmin_f32()
122 p0 = vcmpleq_m(vecSrc, curExtremValVec, p0); in arm_absmin_f32()
123 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmin_f32()
arm_absmin_f16.c
67 f16x8_t vecSrc; in arm_absmin_f16() local
84 vecSrc = vldrhq_f16(pSrcVec); in arm_absmin_f16()
86 vecSrc = vabsq(vecSrc); in arm_absmin_f16()
91 p0 = vcmpleq(vecSrc, curExtremValVec); in arm_absmin_f16()
92 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmin_f16()
110 vecSrc = vldrhq_f16(pSrcVec); in arm_absmin_f16()
112 vecSrc = vabsq(vecSrc); in arm_absmin_f16()
117 p0 = vcmpleq_m(vecSrc, curExtremValVec, p0); in arm_absmin_f16()
118 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmin_f16()
arm_absmax_f16.c
66 f16x8_t vecSrc; in arm_absmax_f16() local
83 vecSrc = vldrhq_f16(pSrcVec); in arm_absmax_f16()
85 vecSrc = vabsq(vecSrc); in arm_absmax_f16()
90 p0 = vcmpgeq(vecSrc, curExtremValVec); in arm_absmax_f16()
91 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmax_f16()
107 vecSrc = vldrhq_f16(pSrcVec); in arm_absmax_f16()
109 vecSrc = vabsq(vecSrc); in arm_absmax_f16()
116 p0 = vcmpgeq_m(vecSrc, curExtremValVec, p0); in arm_absmax_f16()
117 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmax_f16()
arm_absmin_q15.c
60 q15x8_t vecSrc; in arm_absmin_q15() local
78 vecSrc = vld1q(pSrcVec); in arm_absmin_q15()
80 vecSrc = vabsq(vecSrc); in arm_absmin_q15()
85 p0 = vcmpleq(vecSrc, curExtremValVec); in arm_absmin_q15()
86 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmin_q15()
102 vecSrc = vld1q(pSrcVec); in arm_absmin_q15()
104 vecSrc = vabsq(vecSrc); in arm_absmin_q15()
111 p0 = vcmpleq_m(vecSrc, curExtremValVec, p0); in arm_absmin_q15()
112 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmin_q15()
arm_absmin_q31.c
61 q31x4_t vecSrc; in arm_absmin_q31() local
79 vecSrc = vldrwq_s32(pSrcVec); in arm_absmin_q31()
81 vecSrc = vabsq(vecSrc); in arm_absmin_q31()
86 p0 = vcmpleq(vecSrc, curExtremValVec); in arm_absmin_q31()
87 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmin_q31()
103 vecSrc = vldrwq_s32(pSrcVec); in arm_absmin_q31()
105 vecSrc = vabsq(vecSrc); in arm_absmin_q31()
112 p0 = vcmpleq_m(vecSrc, curExtremValVec, p0); in arm_absmin_q31()
113 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_absmin_q31()
arm_absmin_q7.c
63 q7x16_t vecSrc; in arm_small_blk_absmin_q7() local
81 vecSrc = vld1q(pSrcVec); in arm_small_blk_absmin_q7()
83 vecSrc = vabsq(vecSrc); in arm_small_blk_absmin_q7()
88 p0 = vcmpleq(vecSrc, curExtremValVec); in arm_small_blk_absmin_q7()
89 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_small_blk_absmin_q7()
105 vecSrc = vld1q(pSrcVec); in arm_small_blk_absmin_q7()
107 vecSrc = vabsq(vecSrc); in arm_small_blk_absmin_q7()
114 p0 = vcmpleq_m(vecSrc, curExtremValVec, p0); in arm_small_blk_absmin_q7()
115 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_small_blk_absmin_q7()
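The arm_absmin_* and arm_absmax_* hits above all share one reduction pattern: load a vector, take its absolute value with vabsq, compare it lane-by-lane against a running extreme vector with vcmpleq (minimum) or vcmpgeq (maximum), and merge the winners with vpselq; the tail pass uses a vctp predicate so lanes past the end of the block cannot win. The f32 sketch below is a hedged illustration of that pattern, not the library code: the function name, the FLT_MAX seed and the final vminnmvq cross-lane reduction are my own choices.

#include <arm_mve.h>
#include <float.h>

/* Sketch: minimum absolute value over blockSize f32 elements. */
float32_t absmin_f32_sketch(const float32_t *pSrc, uint32_t blockSize)
{
    float32x4_t curExtremValVec = vdupq_n_f32(FLT_MAX);

    while (blockSize > 0U)
    {
        mve_pred16_t p0     = vctp32q(blockSize);       /* active lanes for this pass */
        float32x4_t  vecSrc = vldrwq_z_f32(pSrc, p0);   /* inactive lanes load as 0   */
        vecSrc = vabsq(vecSrc);

        /* Predicated compare: inactive lanes stay false, so vpselq keeps the
         * current extreme there and the zero-filled lanes cannot win.        */
        mve_pred16_t cmp = vcmpleq_m(vecSrc, curExtremValVec, p0);
        curExtremValVec  = vpselq(vecSrc, curExtremValVec, cmp);

        pSrc      += 4;
        blockSize  = (blockSize > 4U) ? (blockSize - 4U) : 0U;
    }
    return vminnmvq(FLT_MAX, curExtremValVec);          /* reduce the 4 running minima */
}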
arm_var_q31.c
67 q31x4_t vecSrc; in arm_var_q31() local
83 vecSrc = vldrwq_s32(pSrc); in arm_var_q31()
89 vecSrc = vshrq(vecSrc, 8); in arm_var_q31()
90 sumOfSquares = vmlaldavaq(sumOfSquares, vecSrc, vecSrc); in arm_var_q31()
91 sum = vaddlvaq(sum, vecSrc); in arm_var_q31()
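arm_var_q31 accumulates the sum and the sum of squares in a single pass: vshrq pre-scales each q31 vector to keep headroom, vmlaldavaq adds the lane-wise product vecSrc*vecSrc into a 64-bit accumulator, and vaddlvaq adds the lane sum into a second 64-bit accumulator. A minimal sketch of just that accumulation step follows; the wrapper function and its name are illustrative assumptions, not the library source.

#include <arm_mve.h>

/* Sketch: one-pass sum and sum-of-squares over q31 data, as used for variance. */
void var_accumulate_q31_sketch(const int32_t *pSrc, uint32_t blockSize,
                               int64_t *pSum, int64_t *pSumOfSquares)
{
    int64_t sum = 0, sumOfSquares = 0;

    for (uint32_t i = 0U; i < blockSize / 4U; i++)
    {
        int32x4_t vecSrc = vldrwq_s32(pSrc);
        vecSrc       = vshrq(vecSrc, 8);                          /* pre-scale for headroom */
        sumOfSquares = vmlaldavaq(sumOfSquares, vecSrc, vecSrc);  /* += sum(x[i] * x[i])    */
        sum          = vaddlvaq(sum, vecSrc);                     /* += sum(x[i])           */
        pSrc += 4;
    }
    *pSum          = sum;
    *pSumOfSquares = sumOfSquares;
}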
arm_min_f16.c
66 f16x8_t vecSrc; in arm_min_f16() local
82 vecSrc = vldrhq_f16(pSrcVec); pSrcVec += 8; in arm_min_f16()
87 p0 = vcmpleq(vecSrc, curExtremValVec); in arm_min_f16()
88 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_min_f16()
104 vecSrc = vldrhq_f16(pSrcVec); pSrcVec += 8; in arm_min_f16()
110 p0 = vcmpleq_m(vecSrc, curExtremValVec, p0); in arm_min_f16()
111 curExtremValVec = vpselq(vecSrc, curExtremValVec, p0); in arm_min_f16()
arm_max_no_idx_q15.c
59 q15x8_t vecSrc; in arm_max_no_idx_q15() local
70 vecSrc = vld1q(pSrcVec); in arm_max_no_idx_q15()
75 curExtremValVec = vmaxq(vecSrc, curExtremValVec); in arm_max_no_idx_q15()
88 vecSrc = vld1q(pSrcVec); in arm_max_no_idx_q15()
95 curExtremValVec = vmaxq_m(curExtremValVec, vecSrc, curExtremValVec, p0); in arm_max_no_idx_q15()
arm_min_no_idx_q15.c
59 q15x8_t vecSrc; in arm_min_no_idx_q15() local
70 vecSrc = vld1q(pSrcVec); in arm_min_no_idx_q15()
75 curExtremValVec = vminq(vecSrc, curExtremValVec); in arm_min_no_idx_q15()
88 vecSrc = vld1q(pSrcVec); in arm_min_no_idx_q15()
95 curExtremValVec = vminq_m(curExtremValVec, vecSrc, curExtremValVec, p0); in arm_min_no_idx_q15()
arm_min_no_idx_q31.c
58 q31x4_t vecSrc; in arm_min_no_idx_q31() local
69 vecSrc = vldrwq_s32(pSrcVec); in arm_min_no_idx_q31()
74 curExtremValVec = vminq(vecSrc, curExtremValVec); in arm_min_no_idx_q31()
87 vecSrc = vldrwq_s32(pSrcVec); in arm_min_no_idx_q31()
94 curExtremValVec = vminq_m(curExtremValVec, vecSrc, curExtremValVec, p0); in arm_min_no_idx_q31()
arm_min_no_idx_q7.c
58 q7x16_t vecSrc; in arm_min_no_idx_q7() local
69 vecSrc = vld1q(pSrcVec); in arm_min_no_idx_q7()
74 curExtremValVec = vminq(vecSrc, curExtremValVec); in arm_min_no_idx_q7()
87 vecSrc = vld1q(pSrcVec); in arm_min_no_idx_q7()
94 curExtremValVec = vminq_m(curExtremValVec, vecSrc, curExtremValVec, p0); in arm_min_no_idx_q7()
arm_max_no_idx_q31.c
59 q31x4_t vecSrc; in arm_max_no_idx_q31() local
70 vecSrc = vldrwq_s32(pSrcVec); in arm_max_no_idx_q31()
75 curExtremValVec = vmaxq(vecSrc, curExtremValVec); in arm_max_no_idx_q31()
88 vecSrc = vldrwq_s32(pSrcVec); in arm_max_no_idx_q31()
95 curExtremValVec = vmaxq_m(curExtremValVec, vecSrc, curExtremValVec, p0); in arm_max_no_idx_q31()
arm_max_no_idx_q7.c
59 q7x16_t vecSrc; in arm_max_no_idx_q7() local
70 vecSrc = vld1q(pSrcVec); in arm_max_no_idx_q7()
75 curExtremValVec = vmaxq(vecSrc, curExtremValVec); in arm_max_no_idx_q7()
88 vecSrc = vld1q(pSrcVec); in arm_max_no_idx_q7()
95 curExtremValVec = vmaxq_m(curExtremValVec, vecSrc, curExtremValVec, p0); in arm_max_no_idx_q7()
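arm_min_f16 and the *_no_idx results above keep a running extreme vector: vminq / vmaxq in the main loop and their merging _m forms in the tail, so that lanes beyond the end of the block leave the accumulator untouched. The q15 sketch below is a hedged, fully tail-predicated version of the minimum variant; the function name, the INT16_MAX seed and the final vminvq cross-lane reduction are my own choices rather than the library code.

#include <arm_mve.h>
#include <stdint.h>

/* Sketch: minimum value over blockSize q15 elements, no index returned. */
int16_t min_no_idx_q15_sketch(const int16_t *pSrc, uint32_t blockSize)
{
    int16x8_t curExtremValVec = vdupq_n_s16(INT16_MAX);

    while (blockSize > 0U)
    {
        mve_pred16_t p0     = vctp16q(blockSize);       /* active lanes               */
        int16x8_t    vecSrc = vldrhq_z_s16(pSrc, p0);   /* inactive lanes load as 0   */

        /* Merging min: inactive lanes keep curExtremValVec unchanged. */
        curExtremValVec = vminq_m(curExtremValVec, vecSrc, curExtremValVec, p0);

        pSrc      += 8;
        blockSize  = (blockSize > 8U) ? (blockSize - 8U) : 0U;
    }
    return vminvq(INT16_MAX, curExtremValVec);          /* reduce 8 lanes to a scalar */
}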
arm_var_f16.c
63 f16x8_t vecSrc; in arm_var_f16() local
79 vecSrc = vldrhq_z_f16((float16_t const *) pSrc, p); in arm_var_f16()
83 vecSrc = vsubq_m(vuninitializedq_f16(), vecSrc, fMean, p); in arm_var_f16()
84 sumVec = vfmaq_m(sumVec, vecSrc, vecSrc, p); in arm_var_f16()
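The arm_var_f16 lines show the tail-predicated second pass of the variance: vldrhq_z_f16 loads only the active lanes, vsubq_m subtracts the previously computed mean in those lanes, and vfmaq_m accumulates (x - mean)^2 under the same predicate. The sketch below illustrates that pass under the assumption that the mean is already known; the function name and the returned partial-sum vector (which the caller would still reduce across lanes and divide by N - 1) are illustrative.

#include <arm_mve.h>

/* Sketch: predicated accumulation of squared deviations for an f16 variance. */
float16x8_t var_sumsq_f16_sketch(const float16_t *pSrc, uint32_t blockSize, float16_t mean)
{
    float16x8_t sumVec = vdupq_n_f16((float16_t)0.0f);
    float16x8_t fMean  = vdupq_n_f16(mean);

    while (blockSize > 0U)
    {
        mve_pred16_t p      = vctp16q(blockSize);
        float16x8_t  vecSrc = vldrhq_z_f16(pSrc, p);                 /* load active lanes only  */
        vecSrc = vsubq_m(vuninitializedq_f16(), vecSrc, fMean, p);   /* x - mean (active lanes) */
        sumVec = vfmaq_m(sumVec, vecSrc, vecSrc, p);                 /* += (x - mean)^2         */

        pSrc      += 8;
        blockSize  = (blockSize > 8U) ? (blockSize - 8U) : 0U;
    }
    return sumVec;   /* per-lane partial sums of squared deviations */
}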
/cmsis-dsp-3.5.0/Source/BasicMathFunctions/
arm_not_u16.c
64 uint16x8_t vecSrc; in arm_not_u16() local
71 vecSrc = vld1q(pSrc); in arm_not_u16()
73 vst1q(pDst, vmvnq_u16(vecSrc) ); in arm_not_u16()
88 vecSrc = vld1q(pSrc); in arm_not_u16()
89 vstrhq_p(pDst, vmvnq_u16(vecSrc), p0); in arm_not_u16()
arm_not_u8.c
56 uint8x16_t vecSrc; in arm_not_u8() local
63 vecSrc = vld1q(pSrc); in arm_not_u8()
65 vst1q(pDst, vmvnq_u8(vecSrc) ); in arm_not_u8()
80 vecSrc = vld1q(pSrc); in arm_not_u8()
81 vstrbq_p(pDst, vmvnq_u8(vecSrc), p0); in arm_not_u8()
arm_not_u32.c
56 uint32x4_t vecSrc; in arm_not_u32() local
63 vecSrc = vld1q(pSrc); in arm_not_u32()
65 vst1q(pDst, vmvnq_u32(vecSrc) ); in arm_not_u32()
80 vecSrc = vld1q(pSrc); in arm_not_u32()
81 vstrwq_p(pDst, vmvnq_u32(vecSrc), p0); in arm_not_u32()
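The arm_not_u* kernels are the simplest of these: load, invert with vmvnq, store, with the tail written back through a predicated store (vstrbq_p / vstrhq_p / vstrwq_p) so only the remaining elements are touched. Below is a minimal u32 sketch of that loop in fully tail-predicated form; the function name and single-loop structure are illustrative (the library splits it into a main loop plus a tail).

#include <arm_mve.h>

/* Sketch: pDst[i] = ~pSrc[i] for blockSize 32-bit words, with a predicated tail. */
void not_u32_sketch(const uint32_t *pSrc, uint32_t *pDst, uint32_t blockSize)
{
    while (blockSize > 0U)
    {
        mve_pred16_t p0     = vctp32q(blockSize);       /* active lanes (at most 4)   */
        uint32x4_t   vecSrc = vldrwq_z_u32(pSrc, p0);   /* load only the active lanes */
        vstrwq_p(pDst, vmvnq_u32(vecSrc), p0);          /* invert and store them      */

        pSrc      += 4;
        pDst      += 4;
        blockSize  = (blockSize > 4U) ? (blockSize - 4U) : 0U;
    }
}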
arm_scale_q7.c
66 q7x16_t vecSrc; in arm_scale_q7() local
80 vecSrc = vld1q(pSrc); in arm_scale_q7()
82 low = vmullbq_int(vecSrc, vdupq_n_s8(scaleFract)); in arm_scale_q7()
85 high = vmulltq_int(vecSrc, vdupq_n_s8(scaleFract)); in arm_scale_q7()
107 vecSrc = vld1q(pSrc); in arm_scale_q7()
108 low = vmullbq_int_s8(vecSrc, vdupq_n_s8(scaleFract)); in arm_scale_q7()
112 high = vmulltq_int_s8(vecSrc, vdupq_n_s8(scaleFract)); in arm_scale_q7()
arm_scale_q15.c
66 q15x8_t vecSrc; in arm_scale_q15() local
79 vecSrc = vld1q(pSrc); in arm_scale_q15()
80 low = vmullbq_int(vecSrc, vdupq_n_s16(scaleFract)); in arm_scale_q15()
84 high = vmulltq_int(vecSrc, vdupq_n_s16(scaleFract)); in arm_scale_q15()
106 vecSrc = vld1q(pSrc); in arm_scale_q15()
107 low = vmullbq_int(vecSrc, vdupq_n_s16(scaleFract)); in arm_scale_q15()
111 high = vmulltq_int(vecSrc, vdupq_n_s16(scaleFract)); in arm_scale_q15()
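In arm_scale_q7 and arm_scale_q15 the fractional multiply uses the widening pair vmullbq_int / vmulltq_int: the bottom form multiplies the even-numbered lanes and the top form the odd-numbered lanes, each yielding a vector of double-width products that the library then shifts and narrows back to the original width. The small sketch below shows only that bottom/top split for q15 data; the shift-and-narrow step is deliberately omitted and the function name is illustrative.

#include <arm_mve.h>

/* Sketch: widen 8 q15 samples into two int32x4_t product vectors
 * by multiplying with a broadcast fractional scale factor.       */
void scale_widen_q15_sketch(const int16_t *pSrc, int16_t scaleFract,
                            int32x4_t *pLow, int32x4_t *pHigh)
{
    int16x8_t vecSrc = vld1q(pSrc);

    *pLow  = vmullbq_int(vecSrc, vdupq_n_s16(scaleFract));   /* even lanes -> 32-bit products */
    *pHigh = vmulltq_int(vecSrc, vdupq_n_s16(scaleFract));   /* odd lanes  -> 32-bit products */
}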
arm_offset_q31.c
64 q31x4_t vecSrc; in arm_offset_q31() local
74 vecSrc = vld1q(pSrc); in arm_offset_q31()
75 vst1q(pDst, vqaddq(vecSrc, offset)); in arm_offset_q31()
93 vecSrc = vld1q(pSrc); in arm_offset_q31()
94 vstrwq_p(pDst, vqaddq(vecSrc, offset), p0); in arm_offset_q31()
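arm_offset_q31 adds a scalar offset with saturation: vqaddq accepts the vector and the scalar directly, and the tail store is predicated with vstrwq_p. The sketch below writes the whole loop in tail-predicated form, which is equivalent in effect to the library's main-loop-plus-tail split; the function name is illustrative.

#include <arm_mve.h>

/* Sketch: pDst[i] = saturate(pSrc[i] + offset) for blockSize q31 samples. */
void offset_q31_sketch(const int32_t *pSrc, int32_t offset, int32_t *pDst, uint32_t blockSize)
{
    while (blockSize > 0U)
    {
        mve_pred16_t p0     = vctp32q(blockSize);           /* active lanes                     */
        int32x4_t    vecSrc = vldrwq_z_s32(pSrc, p0);
        vstrwq_p(pDst, vqaddq(vecSrc, offset), p0);         /* saturating add, predicated store */

        pSrc      += 4;
        pDst      += 4;
        blockSize  = (blockSize > 4U) ? (blockSize - 4U) : 0U;
    }
}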
