/***************************************************************************//**
* \file cy_crypto_core_hw_vu.h
* \version 2.40
*
* \brief
*  This file provides constants and function prototypes
*  for the Vector Unit functions in the Crypto block driver.
*
********************************************************************************
* Copyright 2016-2020 Cypress Semiconductor Corporation
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*    http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/


#if !defined (CY_CRYPTO_CORE_HW_VU_H)
#define CY_CRYPTO_CORE_HW_VU_H

#include "cy_crypto_core_hw.h"

#if defined (CY_IP_MXCRYPTO)

#if defined(__cplusplus)
extern "C" {
#endif

#if (CPUSS_CRYPTO_VU == 1)

/***************************************
*        Crypto IP opcodes
***************************************/

#define CY_CRYPTO_VU_HW_REG0            (0u)
#define CY_CRYPTO_VU_HW_REG1            (1u)
#define CY_CRYPTO_VU_HW_REG2            (2u)
#define CY_CRYPTO_VU_HW_REG3            (3u)
#define CY_CRYPTO_VU_HW_REG4            (4u)
#define CY_CRYPTO_VU_HW_REG5            (5u)
#define CY_CRYPTO_VU_HW_REG6            (6u)
#define CY_CRYPTO_VU_HW_REG7            (7u)
#define CY_CRYPTO_VU_HW_REG8            (8u)
#define CY_CRYPTO_VU_HW_REG9            (9u)
#define CY_CRYPTO_VU_HW_REG10           (10u)
#define CY_CRYPTO_VU_HW_REG11           (11u)
#define CY_CRYPTO_VU_HW_REG12           (12u)
#define CY_CRYPTO_VU_HW_REG13           (13u)
#define CY_CRYPTO_VU_HW_REG14           (14u)
#define CY_CRYPTO_VU_HW_REG15           (15u)

/* Crypto IP condition codes (vector unit) */
#define CY_CRYPTO_VU_COND_ALWAYS        (0x00u)
#define CY_CRYPTO_VU_COND_EQ            (0x01u)
#define CY_CRYPTO_VU_COND_NE            (0x02u)
#define CY_CRYPTO_VU_COND_CS            (0x03u)
#define CY_CRYPTO_VU_COND_CC            (0x04u)
#define CY_CRYPTO_VU_COND_HI            (0x05u)
#define CY_CRYPTO_VU_COND_LS            (0x06u)
#define CY_CRYPTO_VU_COND_EVEN          (0x07u)
#define CY_CRYPTO_VU_COND_ODD           (0x08u)
#define CY_CRYPTO_VU_COND_MASK          (0x1FFu)
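
/*
 * Illustrative sketch (not a documented sequence): the condition codes above
 * gate the CY_CRYPTO_VU_COND_* wrappers defined later in this file. Assuming
 * CY_CRYPTO_VU_COND_EQ executes only when the zero flag was set by a preceding
 * test, a conditional register move could look like:
 *
 *     CY_CRYPTO_VU_TST (base, CY_CRYPTO_VU_HW_REG1);
 *     CY_CRYPTO_VU_COND_MOV_REG (base, CY_CRYPTO_VU_COND_EQ,
 *                                CY_CRYPTO_VU_HW_REG2, CY_CRYPTO_VU_HW_REG3);
 */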

/* Crypto IP status (vector unit) */
#define CY_CRYPTO_VU_STATUS_CARRY       (0u)
#define CY_CRYPTO_VU_STATUS_EVEN        (1u)
#define CY_CRYPTO_VU_STATUS_ZERO        (2u)
#define CY_CRYPTO_VU_STATUS_ONE         (3u)
#define CY_CRYPTO_VU_STATUS_CARRY_BIT   (uint32_t)(((uint32_t)1u) << CY_CRYPTO_VU_STATUS_CARRY)
#define CY_CRYPTO_VU_STATUS_EVEN_BIT    (uint32_t)(((uint32_t)1u) << CY_CRYPTO_VU_STATUS_EVEN)
#define CY_CRYPTO_VU_STATUS_ZERO_BIT    (uint32_t)(((uint32_t)1u) << CY_CRYPTO_VU_STATUS_ZERO)
#define CY_CRYPTO_VU_STATUS_ONE_BIT     (uint32_t)(((uint32_t)1u) << CY_CRYPTO_VU_STATUS_ONE)
#define CY_CRYPTO_VU_STATUS_MASK        (uint32_t)(CY_CRYPTO_VU_STATUS_CARRY_BIT | CY_CRYPTO_VU_STATUS_EVEN_BIT | \
                                                   CY_CRYPTO_VU_STATUS_ZERO_BIT | CY_CRYPTO_VU_STATUS_ONE_BIT)

#define CY_CRYPTO_VU_REG_BIT(nreg)      (((uint32_t)1u) << (nreg))

/* Crypto registers field processing (vector unit) */
#define CY_CRYPTO_VU_GET_REG_DATA(data)       (((data) >> 16U) & 0x00003fffUL)
#define CY_CRYPTO_VU_GET_REG_SIZE(data)       ((data) & 0x00000fffUL)

#define CY_CRYPTO_VU_SIZE_FLD_MASK            (0x00001fffuL)
#define CY_CRYPTO_VU_DATA_FLD_MASK            (0x00003fffuL)
#define CY_CRYPTO_VU_DATA_FLD_POS             (16u)
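
/*
 * Example (illustrative only): decode the operand word address and bit size
 * held in a VU register-file entry. CY_CRYPTO_VU_SET_REG() below stores
 * (size - 1), so 1 is added back when reading the size out.
 *
 *     uint32_t rfVal   = REG_CRYPTO_VU_RF_DATA(base, CY_CRYPTO_VU_HW_REG0);
 *     uint32_t dataPtr = CY_CRYPTO_VU_GET_REG_DATA(rfVal);
 *     uint32_t bitSize = CY_CRYPTO_VU_GET_REG_SIZE(rfVal) + 1u;
 */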

/* Vector Unit instructions */
#define CY_CRYPTO_VU_ALLOC_MEM_OPC            (0x12u)
#define CY_CRYPTO_VU_FREE_MEM_OPC             (0x13u)

/* Instructions with register operand only, category I */
#define CY_CRYPTO_VU_SET_REG_OPC              (0x80u)

#define CY_CRYPTO_VU_LD_REG_OPC               (0x00u)
#define CY_CRYPTO_VU_ST_REG_OPC               (0x01u)
#define CY_CRYPTO_VU_MOV_REG_OPC              (0x02u)

/* Instructions with register operand only, category III */
#define CY_CRYPTO_VU_SWAP_REG_OPC             (0x03u)

/* Instructions with register operand only, category IV */
#define CY_CRYPTO_VU_MOV_REG_TO_STATUS_OPC    (0x04u)

/* Instructions with register operand only, category V */
#define CY_CRYPTO_VU_MOV_STATUS_TO_REG_OPC    (0x05u)

#define CY_CRYPTO_VU2_MOV_IMM_TO_STATUS_OPC   (0x0Fu)

/* Instructions with register operand only, category VI */
#define CY_CRYPTO_VU_PUSH_REG_OPC             (0x10u)
#define CY_CRYPTO_VU_POP_REG_OPC              (0x11u)

/* Instructions with register operand only, category VII */
#define CY_CRYPTO_VU_ADD_REG_OPC              (0x06u)
#define CY_CRYPTO_VU_SUB_REG_OPC              (0x07u)
#define CY_CRYPTO_VU_OR_REG_OPC               (0x08u)
#define CY_CRYPTO_VU_AND_REG_OPC              (0x09u)
#define CY_CRYPTO_VU_XOR_REG_OPC              (0x0Au)
#define CY_CRYPTO_VU_NOR_REG_OPC              (0x0Bu)
#define CY_CRYPTO_VU_NAND_REG_OPC             (0x0Cu)
#define CY_CRYPTO_VU_MIN_REG_OPC              (0x0Du)
#define CY_CRYPTO_VU_MAX_REG_OPC              (0x0Eu)

/* Instructions with mixed operands, category I */
#define CY_CRYPTO_VU_LSL_OPC                  (0x20u)
#define CY_CRYPTO_VU1_LSR_OPC                 (0x24u)
#define CY_CRYPTO_VU2_LSR_OPC                 (0x23u)

/* Instructions with mixed operands, category II */
#define CY_CRYPTO_VU_LSL1_OPC                 (0x21u)
#define CY_CRYPTO_VU_LSL1_WITH_CARRY_OPC      (0x22u)

#define CY_CRYPTO_VU1_LSR1_OPC                (0x25u)
#define CY_CRYPTO_VU1_LSR1_WITH_CARRY_OPC     (0x26u)
#define CY_CRYPTO_VU2_LSR1_OPC                (0x24u)
#define CY_CRYPTO_VU2_LSR1_WITH_CARRY_OPC     (0x25u)

/* Instructions with mixed operands, category III */
#define CY_CRYPTO_VU1_SET_BIT_OPC             (0x2Cu)
#define CY_CRYPTO_VU1_CLR_BIT_OPC             (0x2Du)
#define CY_CRYPTO_VU1_INV_BIT_OPC             (0x2Eu)

#define CY_CRYPTO_VU2_SET_BIT_OPC             (0x28u)
#define CY_CRYPTO_VU2_CLR_BIT_OPC             (0x29u)
#define CY_CRYPTO_VU2_INV_BIT_OPC             (0x2Au)

/* Instructions with mixed operands, category IV */
#define CY_CRYPTO_VU1_GET_BIT_OPC             (0x2Fu)
#define CY_CRYPTO_VU2_GET_BIT_OPC             (0x2Bu)

/* Instructions with mixed operands, category V */
#define CY_CRYPTO_VU1_CLSAME_OPC              (0x28u)
#define CY_CRYPTO_VU1_CTSAME_OPC              (0x29u)

#define CY_CRYPTO_VU2_CLSAME_OPC              (0x26u)
#define CY_CRYPTO_VU2_CTSAME_OPC              (0x27u)

/* Instructions with memory buffer operands, category I */
#define CY_CRYPTO_VU_SET_TO_ZERO_OPC          (0x34u)
#define CY_CRYPTO_VU_SET_TO_ONE_OPC           (0x35u)

/* Instructions with memory buffer operands, category II */
#define CY_CRYPTO_VU_MOV_OPC                  (0x30u)
#define CY_CRYPTO_VU_XSQUARE_OPC              (0x31u)
#define CY_CRYPTO_VU2_USQUARE_OPC             (0x2Fu)

/* Instructions with memory buffer operands, category III */
#define CY_CRYPTO_VU_CMP_SUB_OPC              (0x3Du)
#define CY_CRYPTO_VU_CMP_DEGREE_OPC           (0x3Eu)

/* Instructions with memory buffer operands, category IV */
#define CY_CRYPTO_VU_TST_OPC                  (0x3fu)

/* Instructions with memory buffer operands, category V */
#define CY_CRYPTO_VU_XMUL_OPC                 (0x32u)
#define CY_CRYPTO_VU_UMUL_OPC                 (0x33u)
#define CY_CRYPTO_VU_ADD_OPC                  (0x36u)
#define CY_CRYPTO_VU_SUB_OPC                  (0x37u)
#define CY_CRYPTO_VU_OR_OPC                   (0x38u)
#define CY_CRYPTO_VU_AND_OPC                  (0x39u)
#define CY_CRYPTO_VU_XOR_OPC                  (0x3Au)
#define CY_CRYPTO_VU_NOR_OPC                  (0x3Bu)
#define CY_CRYPTO_VU_NAND_OPC                 (0x3Cu)

/* Instructions with memory buffer operands, category VI */
#define CY_CRYPTO_VU2_SET_BIT_IMM_OPC         (0x2Cu)
#define CY_CRYPTO_VU2_CLR_BIT_IMM_OPC         (0x2Du)
#define CY_CRYPTO_VU2_INV_BIT_IMM_OPC         (0x2Eu)

#define CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base)  do { ; } while (0uL != _FLD2VAL(CRYPTO_STATUS_VU_BUSY, REG_CRYPTO_STATUS(base)))
#define CY_CRYPTO_VU_READ_SP_REG(base)         CY_CRYPTO_VU_GET_REG_DATA(REG_CRYPTO_VU_RF_DATA(base, 15u))
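
/*
 * Usage sketch (assumption, not a documented sequence): the remaining VU
 * stack space can be estimated from the stack pointer register before a large
 * CY_CRYPTO_VU_ALLOC_MEM() request, mirroring the assertion inside
 * CY_CRYPTO_VU_COND_ALLOC_MEM() below.
 *
 *     uint32_t freeBytes = CY_CRYPTO_VU_READ_SP_REG(base) * 4u;
 */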

__STATIC_INLINE void CY_CRYPTO_VU_SAVE_REG (CRYPTO_Type *base, uint32_t rsrc, uint32_t *data);
__STATIC_INLINE void CY_CRYPTO_VU_RESTORE_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t data);
__STATIC_INLINE void CY_CRYPTO_VU_SET_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t data, uint32_t size);


__STATIC_INLINE void CY_CRYPTO_VU_COND_MOV_REG_TO_STATUS (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MOV_REG_TO_STATUS_OPC,
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rsrc));
}

__STATIC_INLINE void CY_CRYPTO_VU_MOV_REG_TO_STATUS (CRYPTO_Type *base, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_MOV_REG_TO_STATUS (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MOV_STATUS_TO_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MOV_STATUS_TO_REG_OPC,
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_MOV_STATUS_TO_REG (CRYPTO_Type *base, uint32_t rdst)
{
    CY_CRYPTO_VU_COND_MOV_STATUS_TO_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MOV_IMM_TO_STATUS (CRYPTO_Type *base, uint32_t cc, uint32_t imm4)
{
    if (CY_CRYPTO_V1)
    {
        /******* V1 *********/
        uint32_t tmpReg = CY_CRYPTO_VU_HW_REG14;
        uint32_t tmpData;

        CY_CRYPTO_VU_SAVE_REG(base, tmpReg, &tmpData);

        /* Load 4 bit immediate value */
        CY_CRYPTO_VU_SET_REG(base, tmpReg, imm4, 4u);
        CY_CRYPTO_VU_COND_MOV_REG_TO_STATUS(base, cc, tmpReg);

        CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base);

        CY_CRYPTO_VU_RESTORE_REG(base, tmpReg, tmpData);
    }
    else
    {
        /******* V2 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU2_MOV_IMM_TO_STATUS_OPC,
                                        ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)imm4 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_MOV_IMM_TO_STATUS (CRYPTO_Type *base, uint32_t imm4)
{
    CY_CRYPTO_VU_COND_MOV_IMM_TO_STATUS (base, CY_CRYPTO_VU_COND_ALWAYS, imm4);
}

/*******************************************************************************/
__STATIC_INLINE void CY_CRYPTO_VU_SET_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t data, uint32_t size)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                            (uint32_t)CY_CRYPTO_VU_SET_REG_OPC,
                           ((uint32_t)rdst << CY_CRYPTO_RSRC26_SHIFT) |
                           ((uint32_t)data << ((CY_CRYPTO_V1) ? CY_CRYPTO_RSRC12_SHIFT : CY_CRYPTO_RSRC13_SHIFT)) |
                           (((uint32_t)size - 1u) << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LD_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                            (uint32_t)CY_CRYPTO_VU_LD_REG_OPC,
                           ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                           ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                           ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_LD_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_LD_REG(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_ST_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                            (uint32_t)CY_CRYPTO_VU_ST_REG_OPC,
                           ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                           ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                           ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_ST_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_ST_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MOV_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MOV_REG_OPC,
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_MOV_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_MOV_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_SWAP_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_SWAP_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_SWAP_REG (CRYPTO_Type *base, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_SWAP_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_ADD_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_ADD_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_ADD_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_ADD_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_SUB_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_SUB_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_SUB_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_SUB_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_OR_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_OR_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_OR_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_OR_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_AND_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_AND_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_AND_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_AND_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_XOR_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_XOR_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_XOR_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_XOR_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_NOR_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_NOR_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_NOR_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_NOR_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_NAND_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_NAND_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_NAND_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_NAND_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MIN_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MIN_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_MIN_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_MIN_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MAX_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MAX_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_MAX_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_MAX_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_PUSH_REG (CRYPTO_Type *base, uint32_t cc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_PUSH_REG_OPC,
                                   ((uint32_t)cc << CY_CRYPTO_RSRC20_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_PUSH_REG (CRYPTO_Type *base)
{
    CY_CRYPTO_VU_COND_PUSH_REG (base, CY_CRYPTO_VU_COND_ALWAYS);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_POP_REG (CRYPTO_Type *base, uint32_t cc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_POP_REG_OPC,
                                   ((uint32_t)cc << CY_CRYPTO_RSRC20_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_POP_REG (CRYPTO_Type *base)
{
    CY_CRYPTO_VU_COND_POP_REG (base, CY_CRYPTO_VU_COND_ALWAYS);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_ALLOC_MEM (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t size)
{
    CY_ASSERT_L1( (uint32_t)(CY_CRYPTO_VU_READ_SP_REG(base) * 4u) >= CY_CRYPTO_BYTE_SIZE_OF_BITS(size) );

    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
          (uint32_t)CY_CRYPTO_VU_ALLOC_MEM_OPC,
         ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
         ((uint32_t)rdst << ((CY_CRYPTO_V1) ? CY_CRYPTO_RSRC12_SHIFT : CY_CRYPTO_RSRC16_SHIFT)) |
        (((uint32_t)size - 1u)  << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_ALLOC_MEM (CRYPTO_Type *base, uint32_t rdst, uint32_t size)
{
    CY_CRYPTO_VU_COND_ALLOC_MEM (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, size);
}
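
/*
 * Minimal usage sketch (illustrative register choice): allocate a 256-bit
 * operand in VU memory, clear it, and release it again with the matching
 * register bit mask.
 *
 *     CY_CRYPTO_VU_ALLOC_MEM   (base, CY_CRYPTO_VU_HW_REG0, 256u);
 *     CY_CRYPTO_VU_SET_TO_ZERO (base, CY_CRYPTO_VU_HW_REG0);
 *     CY_CRYPTO_VU_FREE_MEM    (base, CY_CRYPTO_VU_REG_BIT(CY_CRYPTO_VU_HW_REG0));
 */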

__STATIC_INLINE void CY_CRYPTO_VU_COND_FREE_MEM (CRYPTO_Type *base, uint32_t cc, uint32_t reg_mask)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_FREE_MEM_OPC,
                                    ((uint32_t)cc << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)reg_mask));
}

__STATIC_INLINE void CY_CRYPTO_VU_FREE_MEM (CRYPTO_Type *base, uint32_t reg_mask)
{
    CY_CRYPTO_VU_COND_FREE_MEM (base, CY_CRYPTO_VU_COND_ALWAYS, reg_mask);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSL (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_LSL_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_LSL (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_LSL (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSL1 (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1)
{
    if (CY_CRYPTO_V1)
    {
        /******* V1 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU_LSL1_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT));
    }
    else
    {
        /******* V2 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU_LSL1_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                        ((uint32_t)CY_CRYPTO_VU_HW_REG15 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_LSL1 (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1)
{
    CY_CRYPTO_VU_COND_LSL1 (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSL1_WITH_CARRY (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1)
{
    if (CY_CRYPTO_V1)
    {
        /******* V1 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU_LSL1_WITH_CARRY_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT));
    }
    else
    {
        /******* V2 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU_LSL1_WITH_CARRY_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                        ((uint32_t)CY_CRYPTO_VU_HW_REG15 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_LSL1_WITH_CARRY (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1)
{
    CY_CRYPTO_VU_COND_LSL1_WITH_CARRY (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSR (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_LSR_OPC : CY_CRYPTO_VU2_LSR_OPC),
                                    ((uint32_t)cc       << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst     << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1    << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0    << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_LSR (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_LSR (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}
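
/*
 * Sketch (assumed operand roles: rsrc1 is the source operand, rsrc0 is a
 * register holding the shift distance): shift a VU operand right by 8 bits
 * using a scratch register for the distance.
 *
 *     CY_CRYPTO_VU_SET_REG (base, CY_CRYPTO_VU_HW_REG4, 8u, 12u);
 *     CY_CRYPTO_VU_LSR     (base, CY_CRYPTO_VU_HW_REG1,
 *                           CY_CRYPTO_VU_HW_REG2, CY_CRYPTO_VU_HW_REG4);
 */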
604 
CY_CRYPTO_VU_COND_LSR1(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1)605 __STATIC_INLINE void CY_CRYPTO_VU_COND_LSR1 (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1)
606 {
607     if (CY_CRYPTO_V1)
608     {
609         /******* V1 *********/
610         Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
611                                          (uint32_t)CY_CRYPTO_VU1_LSR1_OPC,
612                                         ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
613                                         ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
614                                         ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT));
615     }
616     else
617     {
618         /******* V2 *********/
619         Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
620                                          (uint32_t)CY_CRYPTO_VU2_LSR1_OPC,
621                                         ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
622                                         ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
623                                         ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
624                                         ((uint32_t)CY_CRYPTO_VU_HW_REG15 << CY_CRYPTO_RSRC0_SHIFT));
625     }
626 }
627 
CY_CRYPTO_VU_LSR1(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1)628 __STATIC_INLINE void CY_CRYPTO_VU_LSR1 (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1)
629 {
630     CY_CRYPTO_VU_COND_LSR1 (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1);
631 }
632 
CY_CRYPTO_VU_COND_LSR1_WITH_CARRY(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1)633 __STATIC_INLINE void CY_CRYPTO_VU_COND_LSR1_WITH_CARRY (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1)
634 {
635     if (CY_CRYPTO_V1)
636     {
637         /******* V1 *********/
638         Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
639                                      (uint32_t)CY_CRYPTO_VU1_LSR1_WITH_CARRY_OPC,
640                                     ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
641                                     ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
642                                     ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT));
643     }
644     else
645     {
646         /******* V2 *********/
647         Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
648                                          (uint32_t)CY_CRYPTO_VU2_LSR1_WITH_CARRY_OPC,
649                                         ((uint32_t)cc      << CY_CRYPTO_RSRC20_SHIFT) |
650                                         ((uint32_t)rdst    << CY_CRYPTO_RSRC12_SHIFT) |
651                                         ((uint32_t)rsrc1   << CY_CRYPTO_RSRC4_SHIFT)  |
652                                         ((uint32_t)CY_CRYPTO_VU_HW_REG15 << CY_CRYPTO_RSRC0_SHIFT));
653     }
654 }
655 
CY_CRYPTO_VU_LSR1_WITH_CARRY(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1)656 __STATIC_INLINE void CY_CRYPTO_VU_LSR1_WITH_CARRY (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1)
657 {
658     CY_CRYPTO_VU_COND_LSR1_WITH_CARRY (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1);
659 }
660 
CY_CRYPTO_VU_COND_CLSAME(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)661 __STATIC_INLINE void CY_CRYPTO_VU_COND_CLSAME (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
662 {
663     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
664                                      (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_CLSAME_OPC : CY_CRYPTO_VU2_CLSAME_OPC),
665                                     ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
666                                     ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
667                                     ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
668                                     ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
669 }
670 
CY_CRYPTO_VU_CLSAME(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)671 __STATIC_INLINE void CY_CRYPTO_VU_CLSAME (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
672 {
673     CY_CRYPTO_VU_COND_CLSAME (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
674 }
675 
CY_CRYPTO_VU_COND_CTSAME(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)676 __STATIC_INLINE void CY_CRYPTO_VU_COND_CTSAME (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
677 {
678     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
679                                      (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_CTSAME_OPC : CY_CRYPTO_VU2_CTSAME_OPC),
680                                     ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
681                                     ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
682                                     ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
683                                     ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
684 }
685 
CY_CRYPTO_VU_CTSAME(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)686 __STATIC_INLINE void CY_CRYPTO_VU_CTSAME (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
687 {
688     CY_CRYPTO_VU_COND_CTSAME (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
689 }
690 
CY_CRYPTO_VU_COND_SET_BIT(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)691 __STATIC_INLINE void CY_CRYPTO_VU_COND_SET_BIT (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
692 {
693     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
694                                      (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_SET_BIT_OPC : CY_CRYPTO_VU2_SET_BIT_OPC),
695                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
696                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
697                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
698  }
699 
CY_CRYPTO_VU_SET_BIT(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)700 __STATIC_INLINE void CY_CRYPTO_VU_SET_BIT (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
701 {
702     CY_CRYPTO_VU_COND_SET_BIT (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
703 }
704 
CY_CRYPTO_VU_COND_CLR_BIT(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)705 __STATIC_INLINE void CY_CRYPTO_VU_COND_CLR_BIT (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
706 {
707     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
708                                      (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_CLR_BIT_OPC : CY_CRYPTO_VU2_CLR_BIT_OPC),
709                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
710                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
711                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
712 }
713 
CY_CRYPTO_VU_CLR_BIT(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)714 __STATIC_INLINE void CY_CRYPTO_VU_CLR_BIT (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
715 {
716     CY_CRYPTO_VU_COND_CLR_BIT (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
717 }
718 
CY_CRYPTO_VU_COND_INV_BIT(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)719 __STATIC_INLINE void CY_CRYPTO_VU_COND_INV_BIT (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
720 {
721     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
722                                      (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_INV_BIT_OPC : CY_CRYPTO_VU2_INV_BIT_OPC),
723                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
724                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
725                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
726 }
727 
CY_CRYPTO_VU_INV_BIT(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)728 __STATIC_INLINE void CY_CRYPTO_VU_INV_BIT (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
729 {
730     CY_CRYPTO_VU_COND_INV_BIT (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
731 }
732 
CY_CRYPTO_VU_COND_GET_BIT(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)733 __STATIC_INLINE void CY_CRYPTO_VU_COND_GET_BIT (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
734 {
735     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
736                                      (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_GET_BIT_OPC : CY_CRYPTO_VU2_GET_BIT_OPC),
737                                     ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
738                                     ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
739                                     ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
740                                     ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
741 }
742 
CY_CRYPTO_VU_GET_BIT(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)743 __STATIC_INLINE void CY_CRYPTO_VU_GET_BIT (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
744 {
745     CY_CRYPTO_VU_COND_GET_BIT (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
746 }
747 
748 /******************************************************************************/
CY_CRYPTO_VU_COND_SET_BIT_IMM(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t imm13)749 __STATIC_INLINE void CY_CRYPTO_VU_COND_SET_BIT_IMM (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t imm13)
750 {
751     if (CY_CRYPTO_V1)
752     {
753         /******** V1 ********/
754         uint32_t tmpReg = (rdst != CY_CRYPTO_VU_HW_REG14) ? CY_CRYPTO_VU_HW_REG14 : CY_CRYPTO_VU_HW_REG13;
755         uint32_t tmpData;
756         CY_CRYPTO_VU_SAVE_REG(base, tmpReg, &tmpData);
757 
758         /* Load 13 bit immediate value */
759         CY_CRYPTO_VU_SET_REG(base, tmpReg, imm13, 13u);
760         CY_CRYPTO_VU_COND_SET_BIT(base, cc, rdst, tmpReg);
761 
762         CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base);
763 
764         CY_CRYPTO_VU_RESTORE_REG(base, tmpReg, tmpData);
765     }
766     else
767     {
768         /******** V2 ********/
769             Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
770                                          (uint32_t)CY_CRYPTO_VU2_SET_BIT_IMM_OPC,
771                                         ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
772                                         ((uint32_t)rdst  << CY_CRYPTO_RSRC16_SHIFT) |
773                                         ((uint32_t)imm13 << CY_CRYPTO_RSRC0_SHIFT));
774     }
775 }
776 
CY_CRYPTO_VU_SET_BIT_IMM(CRYPTO_Type * base,uint32_t rdst,uint32_t imm13)777 __STATIC_INLINE void CY_CRYPTO_VU_SET_BIT_IMM (CRYPTO_Type *base, uint32_t rdst, uint32_t imm13)
778 {
779     CY_CRYPTO_VU_COND_SET_BIT_IMM(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, imm13);
780 }
781 
CY_CRYPTO_VU_COND_CLR_BIT_IMM(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t imm13)782 __STATIC_INLINE void CY_CRYPTO_VU_COND_CLR_BIT_IMM (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t imm13)
783 {
784     if (CY_CRYPTO_V1)
785     {
786         /******** V1 ********/
787         uint32_t tmpReg = (rdst != CY_CRYPTO_VU_HW_REG14) ? CY_CRYPTO_VU_HW_REG14 : CY_CRYPTO_VU_HW_REG13;
788         uint32_t tmpData;
789         CY_CRYPTO_VU_SAVE_REG(base, tmpReg, &tmpData);
790 
791         /* Load 13 bit immediate value */
792         CY_CRYPTO_VU_SET_REG(base, tmpReg, imm13, 13u);
793         CY_CRYPTO_VU_COND_CLR_BIT(base, cc, rdst, tmpReg);
794 
795         CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base);
796 
797         CY_CRYPTO_VU_RESTORE_REG(base, tmpReg, tmpData);
798     }
799     else
800     {
801         /******** V2 ********/
802             Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
803                                          (uint32_t)CY_CRYPTO_VU2_CLR_BIT_IMM_OPC,
804                                         ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
805                                         ((uint32_t)rdst  << CY_CRYPTO_RSRC16_SHIFT) |
806                                         ((uint32_t)imm13 << CY_CRYPTO_RSRC0_SHIFT));
807     }
808 }
809 
CY_CRYPTO_VU_CLR_BIT_IMM(CRYPTO_Type * base,uint32_t rdst,uint32_t imm13)810 __STATIC_INLINE void CY_CRYPTO_VU_CLR_BIT_IMM (CRYPTO_Type *base, uint32_t rdst, uint32_t imm13)
811 {
812     CY_CRYPTO_VU_COND_CLR_BIT_IMM(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, imm13);
813 }
814 
CY_CRYPTO_VU_COND_INV_BIT_IMM(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t imm13)815 __STATIC_INLINE void CY_CRYPTO_VU_COND_INV_BIT_IMM (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t imm13)
816 {
817     if (CY_CRYPTO_V1)
818     {
819         /******** V1 ********/
820         uint32_t tmpReg = (rdst != CY_CRYPTO_VU_HW_REG14) ? CY_CRYPTO_VU_HW_REG14 : CY_CRYPTO_VU_HW_REG13;
821         uint32_t tmpData;
822         CY_CRYPTO_VU_SAVE_REG(base, tmpReg, &tmpData);
823 
824         /* Load 13 bit immediate value */
825         CY_CRYPTO_VU_SET_REG(base, tmpReg, imm13, 13u);
826         CY_CRYPTO_VU_COND_INV_BIT(base, cc, rdst, tmpReg);
827 
828         CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base);
829 
830         CY_CRYPTO_VU_RESTORE_REG(base, tmpReg, tmpData);
831     }
832     else
833     {
834         /******** V2 ********/
835             Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
836                                          (uint32_t)CY_CRYPTO_VU2_INV_BIT_IMM_OPC,
837                                         ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
838                                         ((uint32_t)rdst  << CY_CRYPTO_RSRC16_SHIFT) |
839                                         ((uint32_t)imm13 << CY_CRYPTO_RSRC0_SHIFT));
840     }
841 }
842 
CY_CRYPTO_VU_INV_BIT_IMM(CRYPTO_Type * base,uint32_t rdst,uint32_t imm13)843 __STATIC_INLINE void CY_CRYPTO_VU_INV_BIT_IMM (CRYPTO_Type *base, uint32_t rdst, uint32_t imm13)
844 {
845     CY_CRYPTO_VU_COND_INV_BIT_IMM(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, imm13);
846 }
847 
848 /******************************************************************************/
CY_CRYPTO_VU_COND_TST(CRYPTO_Type * base,uint32_t cc,uint32_t rsrc)849 __STATIC_INLINE void CY_CRYPTO_VU_COND_TST (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc)
850 {
851     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
852                                      (uint32_t)CY_CRYPTO_VU_TST_OPC,
853                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
854                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
855 }
856 
CY_CRYPTO_VU_TST(CRYPTO_Type * base,uint32_t rsrc)857 __STATIC_INLINE void CY_CRYPTO_VU_TST (CRYPTO_Type *base, uint32_t rsrc)
858 {
859     CY_CRYPTO_VU_COND_TST (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc);
860 }
861 
CY_CRYPTO_VU_COND_MOV(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)862 __STATIC_INLINE void CY_CRYPTO_VU_COND_MOV (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
863 {
864     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
865                                      (uint32_t)CY_CRYPTO_VU_MOV_OPC,
866                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
867                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
868                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
869 }
870 
CY_CRYPTO_VU_MOV(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)871 __STATIC_INLINE void CY_CRYPTO_VU_MOV (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
872 {
873     CY_CRYPTO_VU_COND_MOV (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
874 }
875 
CY_CRYPTO_VU_COND_XSQUARE(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)876 __STATIC_INLINE void CY_CRYPTO_VU_COND_XSQUARE (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
877 {
878     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
879                                      (uint32_t)CY_CRYPTO_VU_XSQUARE_OPC,
880                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
881                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
882                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
883 }
884 
CY_CRYPTO_VU_XSQUARE(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)885 __STATIC_INLINE void CY_CRYPTO_VU_XSQUARE (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
886 {
887     CY_CRYPTO_VU_COND_XSQUARE (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
888 }
889 
CY_CRYPTO_VU_COND_XMUL(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)890 __STATIC_INLINE void CY_CRYPTO_VU_COND_XMUL (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
891 {
892     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
893                                      (uint32_t)CY_CRYPTO_VU_XMUL_OPC,
894                                     ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
895                                     ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
896                                     ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
897                                     ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
898 }
899 
CY_CRYPTO_VU_XMUL(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)900 __STATIC_INLINE void CY_CRYPTO_VU_XMUL (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
901 {
902     CY_CRYPTO_VU_COND_XMUL (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
903 }
904 
905 
CY_CRYPTO_VU_COND_UMUL(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)906 __STATIC_INLINE void CY_CRYPTO_VU_COND_UMUL (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
907 {
908     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
909                                      (uint32_t)CY_CRYPTO_VU_UMUL_OPC,
910                                     ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
911                                     ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
912                                     ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
913                                     ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
914 }
915 
CY_CRYPTO_VU_UMUL(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)916 __STATIC_INLINE void CY_CRYPTO_VU_UMUL (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
917 {
918     CY_CRYPTO_VU_COND_UMUL (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
919 }
920 
CY_CRYPTO_VU_COND_USQUARE(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)921 __STATIC_INLINE void CY_CRYPTO_VU_COND_USQUARE (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
922 {
923     if (CY_CRYPTO_V1)
924     {
925         /***** V1 *******/
926         CY_CRYPTO_VU_COND_UMUL(base, cc, rdst, rsrc, rsrc);
927     }
928     else
929     {
930         /***** V2 *******/
931             Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
932                                          (uint32_t)CY_CRYPTO_VU2_USQUARE_OPC,
933                                         ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
934                                         ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
935                                         ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
936     }
937 }
938 
CY_CRYPTO_VU_USQUARE(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)939 __STATIC_INLINE void CY_CRYPTO_VU_USQUARE (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
940 {
941     CY_CRYPTO_VU_COND_USQUARE(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
942 }
943 
CY_CRYPTO_VU_COND_SET_TO_ZERO(CRYPTO_Type * base,uint32_t cc,uint32_t rdst)944 __STATIC_INLINE void CY_CRYPTO_VU_COND_SET_TO_ZERO (CRYPTO_Type *base, uint32_t cc, uint32_t rdst)
945 {
946     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
947                                      (uint32_t)CY_CRYPTO_VU_SET_TO_ZERO_OPC,
948                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
949                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT));
950 }
951 
CY_CRYPTO_VU_SET_TO_ZERO(CRYPTO_Type * base,uint32_t rdst)952 __STATIC_INLINE void CY_CRYPTO_VU_SET_TO_ZERO (CRYPTO_Type *base, uint32_t rdst)
953 {
954     CY_CRYPTO_VU_COND_SET_TO_ZERO (base, CY_CRYPTO_VU_COND_ALWAYS, rdst);
955 }
956 
CY_CRYPTO_VU_COND_SET_TO_ONE(CRYPTO_Type * base,uint32_t cc,uint32_t rdst)957 __STATIC_INLINE void CY_CRYPTO_VU_COND_SET_TO_ONE (CRYPTO_Type *base, uint32_t cc, uint32_t rdst)
958 {
959     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
960                                      (uint32_t)CY_CRYPTO_VU_SET_TO_ONE_OPC,
961                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
962                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT));
963 }
964 
CY_CRYPTO_VU_SET_TO_ONE(CRYPTO_Type * base,uint32_t rdst)965 __STATIC_INLINE void CY_CRYPTO_VU_SET_TO_ONE (CRYPTO_Type *base, uint32_t rdst)
966 {
967     CY_CRYPTO_VU_COND_SET_TO_ONE (base, CY_CRYPTO_VU_COND_ALWAYS, rdst);
968 }
969 
__STATIC_INLINE void CY_CRYPTO_VU_COND_ADD (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_ADD_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_ADD (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_ADD (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_SUB (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING, (uint32_t)CY_CRYPTO_VU_SUB_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_SUB (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_SUB (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

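/*
 * Usage sketch (illustrative only): ADD/SUB compute rdst := rsrc1 + rsrc0 and
 * rdst := rsrc1 - rsrc0 on the operands the registers point to. Register
 * indices are example values.
 *
 *     CY_CRYPTO_VU_ADD(base, CY_CRYPTO_VU_HW_REG2,
 *                            CY_CRYPTO_VU_HW_REG0, CY_CRYPTO_VU_HW_REG1);
 *
 * The COND_ variants execute only when the condition code matches the current
 * VU status flags, which allows flag-driven sequences such as the following
 * (the flag choice is an assumption shown for illustration):
 *
 *     CY_CRYPTO_VU_COND_SUB(base, CY_CRYPTO_VU_COND_CS, CY_CRYPTO_VU_HW_REG2,
 *                                 CY_CRYPTO_VU_HW_REG2, CY_CRYPTO_VU_HW_REG3);
 */
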
__STATIC_INLINE void CY_CRYPTO_VU_COND_OR (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_OR_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_OR (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_OR (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_AND (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_AND_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_AND (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_AND (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_XOR (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING, (uint32_t)CY_CRYPTO_VU_XOR_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_XOR (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_XOR (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_NOR (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_NOR_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_NOR (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_NOR (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_NAND (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_NAND_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_NAND (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_NAND (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

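/*
 * Usage sketch (illustrative only): the bitwise instructions (OR, AND, XOR,
 * NOR, NAND) use the same operand layout as ADD/SUB, i.e.
 * rdst := rsrc1 <op> rsrc0. Register indices are example values.
 *
 *     CY_CRYPTO_VU_XOR(base, CY_CRYPTO_VU_HW_REG6,
 *                            CY_CRYPTO_VU_HW_REG6, CY_CRYPTO_VU_HW_REG7);
 *     CY_CRYPTO_VU_AND(base, CY_CRYPTO_VU_HW_REG8,
 *                            CY_CRYPTO_VU_HW_REG6, CY_CRYPTO_VU_HW_REG7);
 */
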
__STATIC_INLINE void CY_CRYPTO_VU_COND_CMP_SUB (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_CMP_SUB_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_CMP_SUB (CRYPTO_Type *base, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_CMP_SUB (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_CMP_DEGREE (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_CMP_DEGREE_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_CMP_DEGREE (CRYPTO_Type *base, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_CMP_DEGREE (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc1, rsrc0);
}

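/*
 * Usage sketch (illustrative only): neither compare instruction encodes a
 * destination register, so only the VU status flags are affected and the
 * operands stay unchanged. A typical pattern pairs a compare with a
 * conditional instruction; the flag choice below is an assumption, not a
 * guarantee of this driver:
 *
 *     CY_CRYPTO_VU_CMP_SUB (base, CY_CRYPTO_VU_HW_REG0, CY_CRYPTO_VU_HW_REG1);
 *     CY_CRYPTO_VU_COND_SUB(base, CY_CRYPTO_VU_COND_CS, CY_CRYPTO_VU_HW_REG0,
 *                                 CY_CRYPTO_VU_HW_REG0, CY_CRYPTO_VU_HW_REG1);
 */
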
__STATIC_INLINE void CY_CRYPTO_VU_SAVE_REG (CRYPTO_Type *base, uint32_t rsrc, uint32_t *data)
{
    *data = REG_CRYPTO_VU_RF_DATA(base, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_RESTORE_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t data)
{
    CY_CRYPTO_VU_SET_REG(base, rdst, CY_CRYPTO_VU_GET_REG_DATA(data), CY_CRYPTO_VU_GET_REG_SIZE(data) + 1u);
}

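/*
 * Usage sketch (illustrative only): snapshot a VU register descriptor into a
 * local variable and restore it after the hardware register has been reused.
 * The register index is an example value.
 *
 *     uint32_t savedReg;
 *     CY_CRYPTO_VU_SAVE_REG   (base, CY_CRYPTO_VU_HW_REG9, &savedReg);
 *     ...temporarily reuse CY_CRYPTO_VU_HW_REG9...
 *     CY_CRYPTO_VU_RESTORE_REG(base, CY_CRYPTO_VU_HW_REG9, savedReg);
 */
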

#endif /* #if (CPUSS_CRYPTO_VU == 1) */

#if defined(__cplusplus)
}
#endif

#endif /* CY_IP_MXCRYPTO */


#endif /* #if !defined (CY_CRYPTO_CORE_HW_VU_H) */