/***************************************************************************//**
* \file cy_crypto_core_hw_vu.h
* \version 2.120
*
* \brief
*  This file provides constants and function prototypes
*  for the Vector Unit functions in the Crypto block driver.
*
********************************************************************************
* \copyright
* Copyright (c) (2020-2022), Cypress Semiconductor Corporation (an Infineon company) or
* an affiliate of Cypress Semiconductor Corporation.
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*    http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/


#if !defined (CY_CRYPTO_CORE_HW_VU_H)
#define CY_CRYPTO_CORE_HW_VU_H

#include "cy_crypto_core_hw.h"

#if defined (CY_IP_MXCRYPTO)

#if defined(__cplusplus)
extern "C" {
#endif

CY_MISRA_DEVIATE_BLOCK_START('MISRA C-2012 Rule 14.3', 23, \
'Since value of CY_CRYPTO_V1 is decided by PDL device agnostic / hardware specific model, controlling expression will not have an invariant value.')

#if (CPUSS_CRYPTO_VU == 1)

/***************************************
*        Crypto IP opcodes
***************************************/

#define CY_CRYPTO_VU_HW_REG0            (0u)
#define CY_CRYPTO_VU_HW_REG1            (1u)
#define CY_CRYPTO_VU_HW_REG2            (2u)
#define CY_CRYPTO_VU_HW_REG3            (3u)
#define CY_CRYPTO_VU_HW_REG4            (4u)
#define CY_CRYPTO_VU_HW_REG5            (5u)
#define CY_CRYPTO_VU_HW_REG6            (6u)
#define CY_CRYPTO_VU_HW_REG7            (7u)
#define CY_CRYPTO_VU_HW_REG8            (8u)
#define CY_CRYPTO_VU_HW_REG9            (9u)
#define CY_CRYPTO_VU_HW_REG10           (10u)
#define CY_CRYPTO_VU_HW_REG11           (11u)
#define CY_CRYPTO_VU_HW_REG12           (12u)
#define CY_CRYPTO_VU_HW_REG13           (13u)
#define CY_CRYPTO_VU_HW_REG14           (14u)
#define CY_CRYPTO_VU_HW_REG15           (15u)

/* Crypto IP condition codes (vector unit) */
#define CY_CRYPTO_VU_COND_ALWAYS        (0x00u)
#define CY_CRYPTO_VU_COND_EQ            (0x01u)
#define CY_CRYPTO_VU_COND_NE            (0x02u)
#define CY_CRYPTO_VU_COND_CS            (0x03u)
#define CY_CRYPTO_VU_COND_CC            (0x04u)
#define CY_CRYPTO_VU_COND_HI            (0x05u)
#define CY_CRYPTO_VU_COND_LS            (0x06u)
#define CY_CRYPTO_VU_COND_EVEN          (0x07u)
#define CY_CRYPTO_VU_COND_ODD           (0x08u)
#define CY_CRYPTO_VU_COND_MASK          (0x1FFu)

/* Crypto IP status (vector unit) */
#define CY_CRYPTO_VU_STATUS_CARRY       (0u)
#define CY_CRYPTO_VU_STATUS_EVEN        (1u)
#define CY_CRYPTO_VU_STATUS_ZERO        (2u)
#define CY_CRYPTO_VU_STATUS_ONE         (3u)
#define CY_CRYPTO_VU_STATUS_CARRY_BIT   (uint32_t)(((uint32_t)1u) << CY_CRYPTO_VU_STATUS_CARRY)
#define CY_CRYPTO_VU_STATUS_EVEN_BIT    (uint32_t)(((uint32_t)1u) << CY_CRYPTO_VU_STATUS_EVEN)
#define CY_CRYPTO_VU_STATUS_ZERO_BIT    (uint32_t)(((uint32_t)1u) << CY_CRYPTO_VU_STATUS_ZERO)
#define CY_CRYPTO_VU_STATUS_ONE_BIT     (uint32_t)(((uint32_t)1u) << CY_CRYPTO_VU_STATUS_ONE)
#define CY_CRYPTO_VU_STATUS_MASK        (uint32_t)(CY_CRYPTO_VU_STATUS_CARRY_BIT | CY_CRYPTO_VU_STATUS_EVEN_BIT | \
                                                   CY_CRYPTO_VU_STATUS_ZERO_BIT | CY_CRYPTO_VU_STATUS_ONE_BIT)

#define CY_CRYPTO_VU_REG_BIT(nreg)      (((uint32_t)1u) << (nreg))
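/* Note (added for clarity): CY_CRYPTO_VU_REG_BIT builds a one-hot mask for a VU register
 * number; such masks are what the register-mask based helpers below (for example
 * CY_CRYPTO_VU_FREE_MEM) expect. A minimal, illustrative sketch, not taken from the
 * original source:
 *
 *     // Release the buffers held by VU registers 0 and 1 in a single call.
 *     CY_CRYPTO_VU_FREE_MEM(base, CY_CRYPTO_VU_REG_BIT(CY_CRYPTO_VU_HW_REG0) |
 *                                 CY_CRYPTO_VU_REG_BIT(CY_CRYPTO_VU_HW_REG1));
 */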

/* Crypto registers field processing (vector unit) */
#define CY_CRYPTO_VU_GET_REG_DATA(data)       (((data) >> 16U) & 0x00003fffUL)
#define CY_CRYPTO_VU_GET_REG_SIZE(data)       ((data) & 0x00000fffUL)

#define CY_CRYPTO_VU_SIZE_FLD_MASK            (0x00001fffuL)
#define CY_CRYPTO_VU_DATA_FLD_MASK            (0x00003fffuL)
#define CY_CRYPTO_VU_DATA_FLD_POS             (16u)

/* Vector Unit instructions */
#define CY_CRYPTO_VU_ALLOC_MEM_OPC            (0x12u)
#define CY_CRYPTO_VU_FREE_MEM_OPC             (0x13u)

/* Instructions with register operand only, category I */
#define CY_CRYPTO_VU_SET_REG_OPC              (0x80u)

#define CY_CRYPTO_VU_LD_REG_OPC               (0x00u)
#define CY_CRYPTO_VU_ST_REG_OPC               (0x01u)
#define CY_CRYPTO_VU_MOV_REG_OPC              (0x02u)

/* Instructions with register operand only, category III */
#define CY_CRYPTO_VU_SWAP_REG_OPC             (0x03u)

/* Instructions with register operand only, category IV */
#define CY_CRYPTO_VU_MOV_REG_TO_STATUS_OPC    (0x04u)

/* Instructions with register operand only, category V */
#define CY_CRYPTO_VU_MOV_STATUS_TO_REG_OPC    (0x05u)

#define CY_CRYPTO_VU2_MOV_IMM_TO_STATUS_OPC   (0x0Fu)

/* Instructions with register operand only, category VI */
#define CY_CRYPTO_VU_PUSH_REG_OPC             (0x10u)
#define CY_CRYPTO_VU_POP_REG_OPC              (0x11u)

/* Instructions with register operand only, category VII */
#define CY_CRYPTO_VU_ADD_REG_OPC              (0x06u)
#define CY_CRYPTO_VU_SUB_REG_OPC              (0x07u)
#define CY_CRYPTO_VU_OR_REG_OPC               (0x08u)
#define CY_CRYPTO_VU_AND_REG_OPC              (0x09u)
#define CY_CRYPTO_VU_XOR_REG_OPC              (0x0Au)
#define CY_CRYPTO_VU_NOR_REG_OPC              (0x0Bu)
#define CY_CRYPTO_VU_NAND_REG_OPC             (0x0Cu)
#define CY_CRYPTO_VU_MIN_REG_OPC              (0x0Du)
#define CY_CRYPTO_VU_MAX_REG_OPC              (0x0Eu)

/* Instructions with mixed operands, category I */
#define CY_CRYPTO_VU_LSL_OPC                  (0x20u)
#define CY_CRYPTO_VU1_LSR_OPC                 (0x24u)
#define CY_CRYPTO_VU2_LSR_OPC                 (0x23u)

/* Instructions with mixed operands, category II */
#define CY_CRYPTO_VU_LSL1_OPC                 (0x21u)
#define CY_CRYPTO_VU_LSL1_WITH_CARRY_OPC      (0x22u)

#define CY_CRYPTO_VU1_LSR1_OPC                (0x25u)
#define CY_CRYPTO_VU1_LSR1_WITH_CARRY_OPC     (0x26u)
#define CY_CRYPTO_VU2_LSR1_OPC                (0x24u)
#define CY_CRYPTO_VU2_LSR1_WITH_CARRY_OPC     (0x25u)

/* Instructions with mixed operands, category III */
#define CY_CRYPTO_VU1_SET_BIT_OPC             (0x2Cu)
#define CY_CRYPTO_VU1_CLR_BIT_OPC             (0x2Du)
#define CY_CRYPTO_VU1_INV_BIT_OPC             (0x2Eu)

#define CY_CRYPTO_VU2_SET_BIT_OPC             (0x28u)
#define CY_CRYPTO_VU2_CLR_BIT_OPC             (0x29u)
#define CY_CRYPTO_VU2_INV_BIT_OPC             (0x2Au)

/* Instructions with mixed operands, category IV */
#define CY_CRYPTO_VU1_GET_BIT_OPC             (0x2Fu)
#define CY_CRYPTO_VU2_GET_BIT_OPC             (0x2Bu)

/* Instructions with mixed operands, category V */
#define CY_CRYPTO_VU1_CLSAME_OPC              (0x28u)
#define CY_CRYPTO_VU1_CTSAME_OPC              (0x29u)

#define CY_CRYPTO_VU2_CLSAME_OPC              (0x26u)
#define CY_CRYPTO_VU2_CTSAME_OPC              (0x27u)

/* Instructions with memory buffer operands, category I */
#define CY_CRYPTO_VU_SET_TO_ZERO_OPC          (0x34u)
#define CY_CRYPTO_VU_SET_TO_ONE_OPC           (0x35u)

/* Instructions with memory buffer operands, category II */
#define CY_CRYPTO_VU_MOV_OPC                  (0x30u)
#define CY_CRYPTO_VU_XSQUARE_OPC              (0x31u)
#define CY_CRYPTO_VU2_USQUARE_OPC             (0x2Fu)

/* Instructions with memory buffer operands, category III */
#define CY_CRYPTO_VU_CMP_SUB_OPC              (0x3Du)
#define CY_CRYPTO_VU_CMP_DEGREE_OPC           (0x3Eu)

/* Instructions with memory buffer operands, category IV */
#define CY_CRYPTO_VU_TST_OPC                  (0x3fu)

/* Instructions with memory buffer operands, category V */
#define CY_CRYPTO_VU_XMUL_OPC                 (0x32u)
#define CY_CRYPTO_VU_UMUL_OPC                 (0x33u)
#define CY_CRYPTO_VU_ADD_OPC                  (0x36u)
#define CY_CRYPTO_VU_SUB_OPC                  (0x37u)
#define CY_CRYPTO_VU_OR_OPC                   (0x38u)
#define CY_CRYPTO_VU_AND_OPC                  (0x39u)
#define CY_CRYPTO_VU_XOR_OPC                  (0x3Au)
#define CY_CRYPTO_VU_NOR_OPC                  (0x3Bu)
#define CY_CRYPTO_VU_NAND_OPC                 (0x3Cu)

/* Instructions with memory buffer operands, category VI */
#define CY_CRYPTO_VU2_SET_BIT_IMM_OPC         (0x2Cu)
#define CY_CRYPTO_VU2_CLR_BIT_IMM_OPC         (0x2Du)
#define CY_CRYPTO_VU2_INV_BIT_IMM_OPC         (0x2Eu)

#define CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base)  do { ; } while (0uL != _FLD2VAL(CRYPTO_STATUS_VU_BUSY, REG_CRYPTO_STATUS(base)))
#define CY_CRYPTO_VU_READ_SP_REG(base)         CY_CRYPTO_VU_GET_REG_DATA(REG_CRYPTO_VU_RF_DATA(base, 15u))
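/* Note (added for clarity, based on the macros above): CY_CRYPTO_VU1_WAIT_FOR_COMPLETE
 * spins until the VU_BUSY flag in the CRYPTO status register clears, and
 * CY_CRYPTO_VU_READ_SP_REG returns the data field of VU register 15, which this driver
 * treats as the vector-unit stack pointer (see CY_CRYPTO_VU_COND_ALLOC_MEM below).
 */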

__STATIC_INLINE void CY_CRYPTO_VU_SAVE_REG (CRYPTO_Type *base, uint32_t rsrc, uint32_t *data);
__STATIC_INLINE void CY_CRYPTO_VU_RESTORE_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t data);
__STATIC_INLINE void CY_CRYPTO_VU_SET_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t data, uint32_t size);


__STATIC_INLINE void CY_CRYPTO_VU_COND_MOV_REG_TO_STATUS (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MOV_REG_TO_STATUS_OPC,
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rsrc));
}

__STATIC_INLINE void CY_CRYPTO_VU_MOV_REG_TO_STATUS (CRYPTO_Type *base, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_MOV_REG_TO_STATUS (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MOV_STATUS_TO_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MOV_STATUS_TO_REG_OPC,
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_MOV_STATUS_TO_REG (CRYPTO_Type *base, uint32_t rdst)
{
    CY_CRYPTO_VU_COND_MOV_STATUS_TO_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MOV_IMM_TO_STATUS (CRYPTO_Type *base, uint32_t cc, uint32_t imm4)
{
    if (CY_CRYPTO_V1)
    {
        /******* V1 *********/
        uint32_t tmpReg = CY_CRYPTO_VU_HW_REG14;
        uint32_t tmpData;

        CY_CRYPTO_VU_SAVE_REG(base, tmpReg, &tmpData);

        /* Load 4 bit immediate value */
        CY_CRYPTO_VU_SET_REG(base, tmpReg, imm4, 4u);
        CY_CRYPTO_VU_COND_MOV_REG_TO_STATUS(base, cc, tmpReg);

        CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base);

        CY_CRYPTO_VU_RESTORE_REG(base, tmpReg, tmpData);
    }
    else
    {
        /******* V2 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU2_MOV_IMM_TO_STATUS_OPC,
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)imm4 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_MOV_IMM_TO_STATUS (CRYPTO_Type *base, uint32_t imm4)
{
    CY_CRYPTO_VU_COND_MOV_IMM_TO_STATUS (base, CY_CRYPTO_VU_COND_ALWAYS, imm4);
}

/*******************************************************************************/
__STATIC_INLINE void CY_CRYPTO_VU_SET_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t data, uint32_t size)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                            (uint32_t)CY_CRYPTO_VU_SET_REG_OPC,
                           ((uint32_t)rdst << CY_CRYPTO_RSRC26_SHIFT) |
                           ((uint32_t)data << ((CY_CRYPTO_V1) ? CY_CRYPTO_RSRC12_SHIFT : CY_CRYPTO_RSRC13_SHIFT)) |
                           (((uint32_t)size - 1u) << CY_CRYPTO_RSRC0_SHIFT));
}
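/* Added note and hedged example: CY_CRYPTO_VU_SET_REG writes a register-file entry whose
 * data field is 'data' and whose size field is encoded as (size - 1), matching the
 * CY_CRYPTO_VU_GET_REG_DATA / CY_CRYPTO_VU_GET_REG_SIZE accessors above. Illustrative
 * only (the offset semantics of the data field are an assumption, not stated here):
 *
 *     // Describe a 128-bit operand through VU register 4 with data field 0.
 *     CY_CRYPTO_VU_SET_REG(base, CY_CRYPTO_VU_HW_REG4, 0u, 128u);
 */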

__STATIC_INLINE void CY_CRYPTO_VU_COND_LD_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                            (uint32_t)CY_CRYPTO_VU_LD_REG_OPC,
                           ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                           ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                           ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_LD_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_LD_REG(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_ST_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                            (uint32_t)CY_CRYPTO_VU_ST_REG_OPC,
                           ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                           ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                           ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_ST_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_ST_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MOV_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MOV_REG_OPC,
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_MOV_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_MOV_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_SWAP_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_SWAP_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_SWAP_REG (CRYPTO_Type *base, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_SWAP_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_ADD_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_ADD_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_ADD_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_ADD_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_SUB_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_SUB_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_SUB_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_SUB_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_OR_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_OR_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_OR_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_OR_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_AND_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_AND_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_AND_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_AND_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_XOR_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_XOR_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_XOR_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_XOR_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_NOR_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_NOR_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_NOR_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_NOR_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_NAND_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_NAND_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_NAND_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_NAND_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MIN_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MIN_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_MIN_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_MIN_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_MAX_REG (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_MAX_REG_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_MAX_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_MAX_REG (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_PUSH_REG (CRYPTO_Type *base, uint32_t cc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_PUSH_REG_OPC,
                                   ((uint32_t)cc << CY_CRYPTO_RSRC20_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_PUSH_REG (CRYPTO_Type *base)
{
    CY_CRYPTO_VU_COND_PUSH_REG (base, CY_CRYPTO_VU_COND_ALWAYS);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_POP_REG (CRYPTO_Type *base, uint32_t cc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_POP_REG_OPC,
                                   ((uint32_t)cc << CY_CRYPTO_RSRC20_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_POP_REG (CRYPTO_Type *base)
{
    CY_CRYPTO_VU_COND_POP_REG (base, CY_CRYPTO_VU_COND_ALWAYS);
}

__STATIC_INLINE cy_en_crypto_status_t CY_CRYPTO_VU_COND_ALLOC_MEM (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t size)
{
    if ((uint32_t)(CY_CRYPTO_VU_READ_SP_REG(base) * 4u) < CY_CRYPTO_BYTE_SIZE_OF_BITS(size))
    {
        return CY_CRYPTO_MEMORY_ALLOC_FAIL;
    }

    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
          (uint32_t)CY_CRYPTO_VU_ALLOC_MEM_OPC,
         ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
         ((uint32_t)rdst << ((CY_CRYPTO_V1) ? CY_CRYPTO_RSRC12_SHIFT : CY_CRYPTO_RSRC16_SHIFT)) |
        (((uint32_t)size - 1u)  << CY_CRYPTO_RSRC0_SHIFT));

    return CY_CRYPTO_SUCCESS;
}

__STATIC_INLINE cy_en_crypto_status_t CY_CRYPTO_VU_ALLOC_MEM (CRYPTO_Type *base, uint32_t rdst, uint32_t size)
{
    return CY_CRYPTO_VU_COND_ALLOC_MEM (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, size);
}
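/* Hedged usage sketch (added for illustration, not taken from the original source):
 * a typical allocate / use / release sequence built from the helpers in this file.
 *
 *     // Reserve a 256-bit operand through VU register 5, clear it, then release it.
 *     if (CY_CRYPTO_VU_ALLOC_MEM(base, CY_CRYPTO_VU_HW_REG5, 256u) == CY_CRYPTO_SUCCESS)
 *     {
 *         CY_CRYPTO_VU_SET_TO_ZERO(base, CY_CRYPTO_VU_HW_REG5);
 *         // ... operate on the buffer ...
 *         CY_CRYPTO_VU_FREE_MEM(base, CY_CRYPTO_VU_REG_BIT(CY_CRYPTO_VU_HW_REG5));
 *     }
 */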

__STATIC_INLINE void CY_CRYPTO_VU_COND_FREE_MEM (CRYPTO_Type *base, uint32_t cc, uint32_t reg_mask)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU_FREE_MEM_OPC,
                                    ((uint32_t)cc << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)reg_mask));
}

__STATIC_INLINE void CY_CRYPTO_VU_FREE_MEM (CRYPTO_Type *base, uint32_t reg_mask)
{
    CY_CRYPTO_VU_COND_FREE_MEM (base, CY_CRYPTO_VU_COND_ALWAYS, reg_mask);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSL (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    if ((CY_CRYPTO_V1) && (0u == REG_CRYPTO_VU_RF_DATA(base, rsrc0)))
    {
        CY_CRYPTO_VU_COND_XOR_REG(base, cc, rdst, rsrc1, rsrc0);
    }
    else
    {
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                        (uint32_t)CY_CRYPTO_VU_LSL_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                        ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_LSL (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_LSL (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSL1 (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1)
{
    if (CY_CRYPTO_V1)
    {
        /******* V1 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU_LSL1_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT));
    }
    else
    {
        /******* V2 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU_LSL1_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                        ((uint32_t)CY_CRYPTO_VU_HW_REG15 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_LSL1 (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1)
{
    CY_CRYPTO_VU_COND_LSL1 (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSL1_WITH_CARRY (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1)
{
    if (CY_CRYPTO_V1)
    {
        /******* V1 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU_LSL1_WITH_CARRY_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT));
    }
    else
    {
        /******* V2 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU_LSL1_WITH_CARRY_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                        ((uint32_t)CY_CRYPTO_VU_HW_REG15 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_LSL1_WITH_CARRY (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1)
{
    CY_CRYPTO_VU_COND_LSL1_WITH_CARRY (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSR (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    if ((CY_CRYPTO_V1) && (0u == REG_CRYPTO_VU_RF_DATA(base, rsrc0)))
    {
        CY_CRYPTO_VU_COND_XOR_REG(base, cc, rdst, rsrc1, rsrc0);
    }
    else
    {
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_LSR_OPC : CY_CRYPTO_VU2_LSR_OPC),
                                    ((uint32_t)cc       << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst     << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1    << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0    << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_LSR (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_LSR (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSR1 (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1)
{
    if (CY_CRYPTO_V1)
    {
        /******* V1 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU1_LSR1_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT));
    }
    else
    {
        /******* V2 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU2_LSR1_OPC,
                                        ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                        ((uint32_t)CY_CRYPTO_VU_HW_REG15 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_LSR1 (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1)
{
    CY_CRYPTO_VU_COND_LSR1(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_LSR1_WITH_CARRY (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1)
{
    if (CY_CRYPTO_V1)
    {
        /******* V1 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)CY_CRYPTO_VU1_LSR1_WITH_CARRY_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT));
    }
    else
    {
        /******* V2 *********/
        Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                         (uint32_t)CY_CRYPTO_VU2_LSR1_WITH_CARRY_OPC,
                                        ((uint32_t)cc      << CY_CRYPTO_RSRC20_SHIFT) |
                                        ((uint32_t)rdst    << CY_CRYPTO_RSRC12_SHIFT) |
                                        ((uint32_t)rsrc1   << CY_CRYPTO_RSRC4_SHIFT)  |
                                        ((uint32_t)CY_CRYPTO_VU_HW_REG15 << CY_CRYPTO_RSRC0_SHIFT));
    }
}

__STATIC_INLINE void CY_CRYPTO_VU_LSR1_WITH_CARRY (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1)
{
    CY_CRYPTO_VU_COND_LSR1_WITH_CARRY (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_CLSAME (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_CLSAME_OPC : CY_CRYPTO_VU2_CLSAME_OPC),
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_CLSAME (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_CLSAME (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_CTSAME (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_CTSAME_OPC : CY_CRYPTO_VU2_CTSAME_OPC),
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_CTSAME (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_CTSAME (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_SET_BIT (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_SET_BIT_OPC : CY_CRYPTO_VU2_SET_BIT_OPC),
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_SET_BIT (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_SET_BIT (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_CLR_BIT (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_CLR_BIT_OPC : CY_CRYPTO_VU2_CLR_BIT_OPC),
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_CLR_BIT (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_CLR_BIT (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_INV_BIT (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_INV_BIT_OPC : CY_CRYPTO_VU2_INV_BIT_OPC),
                                    ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_INV_BIT (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
{
    CY_CRYPTO_VU_COND_INV_BIT (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
}

__STATIC_INLINE void CY_CRYPTO_VU_COND_GET_BIT (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                     (uint32_t)((CY_CRYPTO_V1) ? CY_CRYPTO_VU1_GET_BIT_OPC : CY_CRYPTO_VU2_GET_BIT_OPC),
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_GET_BIT (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_GET_BIT (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}
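/* Added note: the SET_BIT / CLR_BIT / INV_BIT / GET_BIT helpers above take the bit position
 * from a VU register (rsrc / rsrc0), while the *_BIT_IMM variants below accept a 13-bit
 * immediate bit position instead; on V1 hardware the immediate forms are emulated by staging
 * the immediate in a scratch register with CY_CRYPTO_VU_SET_REG before issuing the register
 * form of the instruction.
 */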
774 
775 /******************************************************************************/
CY_CRYPTO_VU_COND_SET_BIT_IMM(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t imm13)776 __STATIC_INLINE void CY_CRYPTO_VU_COND_SET_BIT_IMM (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t imm13)
777 {
778     if (CY_CRYPTO_V1)
779     {
780         /******** V1 ********/
781         uint32_t tmpReg = (rdst != CY_CRYPTO_VU_HW_REG14) ? CY_CRYPTO_VU_HW_REG14 : CY_CRYPTO_VU_HW_REG13;
782         uint32_t tmpData;
783         CY_CRYPTO_VU_SAVE_REG(base, tmpReg, &tmpData);
784 
785         /* Load 13 bit immediate value */
786         CY_CRYPTO_VU_SET_REG(base, tmpReg, imm13, 13u);
787         CY_CRYPTO_VU_COND_SET_BIT(base, cc, rdst, tmpReg);
788 
789         CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base);
790 
791         CY_CRYPTO_VU_RESTORE_REG(base, tmpReg, tmpData);
792     }
793     else
794     {
795         /******** V2 ********/
796             Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
797                                          (uint32_t)CY_CRYPTO_VU2_SET_BIT_IMM_OPC,
798                                         ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
799                                         ((uint32_t)rdst  << CY_CRYPTO_RSRC16_SHIFT) |
800                                         ((uint32_t)imm13 << CY_CRYPTO_RSRC0_SHIFT));
801     }
802 }
803 
CY_CRYPTO_VU_SET_BIT_IMM(CRYPTO_Type * base,uint32_t rdst,uint32_t imm13)804 __STATIC_INLINE void CY_CRYPTO_VU_SET_BIT_IMM (CRYPTO_Type *base, uint32_t rdst, uint32_t imm13)
805 {
806     CY_CRYPTO_VU_COND_SET_BIT_IMM(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, imm13);
807 }
808 
CY_CRYPTO_VU_COND_CLR_BIT_IMM(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t imm13)809 __STATIC_INLINE void CY_CRYPTO_VU_COND_CLR_BIT_IMM (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t imm13)
810 {
811     if (CY_CRYPTO_V1)
812     {
813         /******** V1 ********/
814         uint32_t tmpReg = (rdst != CY_CRYPTO_VU_HW_REG14) ? CY_CRYPTO_VU_HW_REG14 : CY_CRYPTO_VU_HW_REG13;
815         uint32_t tmpData;
816         CY_CRYPTO_VU_SAVE_REG(base, tmpReg, &tmpData);
817 
818         /* Load 13 bit immediate value */
819         CY_CRYPTO_VU_SET_REG(base, tmpReg, imm13, 13u);
820         CY_CRYPTO_VU_COND_CLR_BIT(base, cc, rdst, tmpReg);
821 
822         CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base);
823 
824         CY_CRYPTO_VU_RESTORE_REG(base, tmpReg, tmpData);
825     }
826     else
827     {
828         /******** V2 ********/
829             Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
830                                          (uint32_t)CY_CRYPTO_VU2_CLR_BIT_IMM_OPC,
831                                         ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
832                                         ((uint32_t)rdst  << CY_CRYPTO_RSRC16_SHIFT) |
833                                         ((uint32_t)imm13 << CY_CRYPTO_RSRC0_SHIFT));
834     }
835 }
836 
CY_CRYPTO_VU_CLR_BIT_IMM(CRYPTO_Type * base,uint32_t rdst,uint32_t imm13)837 __STATIC_INLINE void CY_CRYPTO_VU_CLR_BIT_IMM (CRYPTO_Type *base, uint32_t rdst, uint32_t imm13)
838 {
839     CY_CRYPTO_VU_COND_CLR_BIT_IMM(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, imm13);
840 }
841 
CY_CRYPTO_VU_COND_INV_BIT_IMM(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t imm13)842 __STATIC_INLINE void CY_CRYPTO_VU_COND_INV_BIT_IMM (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t imm13)
843 {
844     if (CY_CRYPTO_V1)
845     {
846         /******** V1 ********/
847         uint32_t tmpReg = (rdst != CY_CRYPTO_VU_HW_REG14) ? CY_CRYPTO_VU_HW_REG14 : CY_CRYPTO_VU_HW_REG13;
848         uint32_t tmpData;
849         CY_CRYPTO_VU_SAVE_REG(base, tmpReg, &tmpData);
850 
851         /* Load 13 bit immediate value */
852         CY_CRYPTO_VU_SET_REG(base, tmpReg, imm13, 13u);
853         CY_CRYPTO_VU_COND_INV_BIT(base, cc, rdst, tmpReg);
854 
855         CY_CRYPTO_VU1_WAIT_FOR_COMPLETE(base);
856 
857         CY_CRYPTO_VU_RESTORE_REG(base, tmpReg, tmpData);
858     }
859     else
860     {
861         /******** V2 ********/
862             Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
863                                          (uint32_t)CY_CRYPTO_VU2_INV_BIT_IMM_OPC,
864                                         ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
865                                         ((uint32_t)rdst  << CY_CRYPTO_RSRC16_SHIFT) |
866                                         ((uint32_t)imm13 << CY_CRYPTO_RSRC0_SHIFT));
867     }
868 }
869 
CY_CRYPTO_VU_INV_BIT_IMM(CRYPTO_Type * base,uint32_t rdst,uint32_t imm13)870 __STATIC_INLINE void CY_CRYPTO_VU_INV_BIT_IMM (CRYPTO_Type *base, uint32_t rdst, uint32_t imm13)
871 {
872     CY_CRYPTO_VU_COND_INV_BIT_IMM(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, imm13);
873 }
874 
875 /******************************************************************************/
CY_CRYPTO_VU_COND_TST(CRYPTO_Type * base,uint32_t cc,uint32_t rsrc)876 __STATIC_INLINE void CY_CRYPTO_VU_COND_TST (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc)
877 {
878     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
879                                      (uint32_t)CY_CRYPTO_VU_TST_OPC,
880                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
881                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
882 }
883 
CY_CRYPTO_VU_TST(CRYPTO_Type * base,uint32_t rsrc)884 __STATIC_INLINE void CY_CRYPTO_VU_TST (CRYPTO_Type *base, uint32_t rsrc)
885 {
886     CY_CRYPTO_VU_COND_TST (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc);
887 }
888 
CY_CRYPTO_VU_COND_MOV(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)889 __STATIC_INLINE void CY_CRYPTO_VU_COND_MOV (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
890 {
891     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
892                                      (uint32_t)CY_CRYPTO_VU_MOV_OPC,
893                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
894                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
895                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
896 }
897 
CY_CRYPTO_VU_MOV(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)898 __STATIC_INLINE void CY_CRYPTO_VU_MOV (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
899 {
900     CY_CRYPTO_VU_COND_MOV (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
901 }
902 
CY_CRYPTO_VU_COND_XSQUARE(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)903 __STATIC_INLINE void CY_CRYPTO_VU_COND_XSQUARE (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
904 {
905     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
906                                      (uint32_t)CY_CRYPTO_VU_XSQUARE_OPC,
907                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
908                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
909                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
910 }
911 
CY_CRYPTO_VU_XSQUARE(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)912 __STATIC_INLINE void CY_CRYPTO_VU_XSQUARE (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
913 {
914     CY_CRYPTO_VU_COND_XSQUARE (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
915 }
916 
CY_CRYPTO_VU_COND_XMUL(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)917 __STATIC_INLINE void CY_CRYPTO_VU_COND_XMUL (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
918 {
919     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
920                                      (uint32_t)CY_CRYPTO_VU_XMUL_OPC,
921                                     ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
922                                     ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
923                                     ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
924                                     ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
925 }
926 
CY_CRYPTO_VU_XMUL(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)927 __STATIC_INLINE void CY_CRYPTO_VU_XMUL (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
928 {
929     CY_CRYPTO_VU_COND_XMUL (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
930 }
931 
932 
CY_CRYPTO_VU_COND_UMUL(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)933 __STATIC_INLINE void CY_CRYPTO_VU_COND_UMUL (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
934 {
935     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
936                                      (uint32_t)CY_CRYPTO_VU_UMUL_OPC,
937                                     ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
938                                     ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
939                                     ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
940                                     ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
941 }
942 
CY_CRYPTO_VU_UMUL(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc1,uint32_t rsrc0)943 __STATIC_INLINE void CY_CRYPTO_VU_UMUL (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
944 {
945     CY_CRYPTO_VU_COND_UMUL (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
946 }
947 
CY_CRYPTO_VU_COND_USQUARE(CRYPTO_Type * base,uint32_t cc,uint32_t rdst,uint32_t rsrc)948 __STATIC_INLINE void CY_CRYPTO_VU_COND_USQUARE (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc)
949 {
950     if (CY_CRYPTO_V1)
951     {
952         /***** V1 *******/
953         CY_CRYPTO_VU_COND_UMUL(base, cc, rdst, rsrc, rsrc);
954     }
955     else
956     {
957         /***** V2 *******/
958             Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
959                                      (uint32_t)CY_CRYPTO_VU2_USQUARE_OPC,
960                                         ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
961                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT) |
962                                     ((uint32_t)rsrc << CY_CRYPTO_RSRC0_SHIFT));
963     }
964 }
965 
CY_CRYPTO_VU_USQUARE(CRYPTO_Type * base,uint32_t rdst,uint32_t rsrc)966 __STATIC_INLINE void CY_CRYPTO_VU_USQUARE (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc)
967 {
968     CY_CRYPTO_VU_COND_USQUARE(base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc);
969 }
970 
CY_CRYPTO_VU_COND_SET_TO_ZERO(CRYPTO_Type * base,uint32_t cc,uint32_t rdst)971 __STATIC_INLINE void CY_CRYPTO_VU_COND_SET_TO_ZERO (CRYPTO_Type *base, uint32_t cc, uint32_t rdst)
972 {
973     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
974                                      (uint32_t)CY_CRYPTO_VU_SET_TO_ZERO_OPC,
975                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
976                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT));
977 }
978 
CY_CRYPTO_VU_SET_TO_ZERO(CRYPTO_Type * base,uint32_t rdst)979 __STATIC_INLINE void CY_CRYPTO_VU_SET_TO_ZERO (CRYPTO_Type *base, uint32_t rdst)
980 {
981     CY_CRYPTO_VU_COND_SET_TO_ZERO (base, CY_CRYPTO_VU_COND_ALWAYS, rdst);
982 }
983 
CY_CRYPTO_VU_COND_SET_TO_ONE(CRYPTO_Type * base,uint32_t cc,uint32_t rdst)984 __STATIC_INLINE void CY_CRYPTO_VU_COND_SET_TO_ONE (CRYPTO_Type *base, uint32_t cc, uint32_t rdst)
985 {
986     Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
987                                      (uint32_t)CY_CRYPTO_VU_SET_TO_ONE_OPC,
988                                     ((uint32_t)cc   << CY_CRYPTO_RSRC20_SHIFT) |
989                                     ((uint32_t)rdst << CY_CRYPTO_RSRC12_SHIFT));
990 }
991 
CY_CRYPTO_VU_SET_TO_ONE(CRYPTO_Type * base,uint32_t rdst)992 __STATIC_INLINE void CY_CRYPTO_VU_SET_TO_ONE (CRYPTO_Type *base, uint32_t rdst)
993 {
994     CY_CRYPTO_VU_COND_SET_TO_ONE (base, CY_CRYPTO_VU_COND_ALWAYS, rdst);
995 }
996 
__STATIC_INLINE void CY_CRYPTO_VU_COND_ADD (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_ADD_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_ADD (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_ADD (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

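/* Vector subtraction: rdst = rsrc1 - rsrc0, executed only when the condition code 'cc'
   is met; CY_CRYPTO_VU_SUB is the unconditional wrapper. */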
__STATIC_INLINE void CY_CRYPTO_VU_COND_SUB (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_SUB_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_SUB (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_SUB (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

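/* Bitwise OR: rdst = rsrc1 | rsrc0, executed only when the condition code 'cc' is met;
   CY_CRYPTO_VU_OR is the unconditional wrapper. */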
__STATIC_INLINE void CY_CRYPTO_VU_COND_OR (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_OR_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_OR (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_OR (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

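/* Bitwise AND: rdst = rsrc1 & rsrc0, executed only when the condition code 'cc' is met;
   CY_CRYPTO_VU_AND is the unconditional wrapper. */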
__STATIC_INLINE void CY_CRYPTO_VU_COND_AND (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_AND_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_AND (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_AND (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

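/* Bitwise XOR: rdst = rsrc1 ^ rsrc0, executed only when the condition code 'cc' is met;
   CY_CRYPTO_VU_XOR is the unconditional wrapper. */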
__STATIC_INLINE void CY_CRYPTO_VU_COND_XOR (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_XOR_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_XOR (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_XOR (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

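/* Bitwise NOR: rdst = ~(rsrc1 | rsrc0), executed only when the condition code 'cc' is met;
   CY_CRYPTO_VU_NOR is the unconditional wrapper. */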
__STATIC_INLINE void CY_CRYPTO_VU_COND_NOR (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_NOR_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_NOR (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_NOR (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

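/* Bitwise NAND: rdst = ~(rsrc1 & rsrc0), executed only when the condition code 'cc' is met;
   CY_CRYPTO_VU_NAND is the unconditional wrapper. */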
__STATIC_INLINE void CY_CRYPTO_VU_COND_NAND (CRYPTO_Type *base, uint32_t cc, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_NAND_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rdst  << CY_CRYPTO_RSRC12_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_NAND (CRYPTO_Type *base, uint32_t rdst, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_NAND (base, CY_CRYPTO_VU_COND_ALWAYS, rdst, rsrc1, rsrc0);
}

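/* Compare by subtraction: evaluates rsrc1 - rsrc0 and updates only the VU status flags
   (no destination register is written), executed only when the condition code 'cc' is met;
   CY_CRYPTO_VU_CMP_SUB is the unconditional wrapper. */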
__STATIC_INLINE void CY_CRYPTO_VU_COND_CMP_SUB (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_CMP_SUB_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_CMP_SUB (CRYPTO_Type *base, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_CMP_SUB (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc1, rsrc0);
}

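/* Compare degrees: compares the bit degrees (positions of the most significant '1') of
   rsrc1 and rsrc0 and updates only the VU status flags, executed only when the condition
   code 'cc' is met; CY_CRYPTO_VU_CMP_DEGREE is the unconditional wrapper. */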
__STATIC_INLINE void CY_CRYPTO_VU_COND_CMP_DEGREE (CRYPTO_Type *base, uint32_t cc, uint32_t rsrc1, uint32_t rsrc0)
{
    Cy_Crypto_Core_Vu_RunInstr(base, CY_CRYPTO_SYNC_NON_BLOCKING,
                                    (uint32_t)CY_CRYPTO_VU_CMP_DEGREE_OPC,
                                    ((uint32_t)cc    << CY_CRYPTO_RSRC20_SHIFT) |
                                    ((uint32_t)rsrc1 << CY_CRYPTO_RSRC4_SHIFT)  |
                                    ((uint32_t)rsrc0 << CY_CRYPTO_RSRC0_SHIFT));
}

__STATIC_INLINE void CY_CRYPTO_VU_CMP_DEGREE (CRYPTO_Type *base, uint32_t rsrc1, uint32_t rsrc0)
{
    CY_CRYPTO_VU_COND_CMP_DEGREE (base, CY_CRYPTO_VU_COND_ALWAYS, rsrc1, rsrc0);
}

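/* Reads the raw register-file entry (data pointer and size fields) of register 'rsrc'
   into *data so that it can later be restored with CY_CRYPTO_VU_RESTORE_REG. */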
__STATIC_INLINE void CY_CRYPTO_VU_SAVE_REG (CRYPTO_Type *base, uint32_t rsrc, uint32_t *data)
{
    *data = REG_CRYPTO_VU_RF_DATA(base, rsrc);
}

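/* Re-creates register 'rdst' from a value previously captured with CY_CRYPTO_VU_SAVE_REG;
   the stored size field holds the bit size minus one, hence the '+ 1u'. */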
__STATIC_INLINE void CY_CRYPTO_VU_RESTORE_REG (CRYPTO_Type *base, uint32_t rdst, uint32_t data)
{
    CY_CRYPTO_VU_SET_REG(base, rdst, CY_CRYPTO_VU_GET_REG_DATA(data), CY_CRYPTO_VU_GET_REG_SIZE(data) + 1u);
}

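/* Illustrative usage sketch (kept in a comment; not part of the API defined above):
   a minimal sequence combining these wrappers. It assumes the CY_CRYPTO_VU_ALLOC_MEM /
   CY_CRYPTO_VU_FREE_MEM helpers and the CY_CRYPTO_VU_REG_BIT macro declared earlier in
   this driver; the 256-bit operand size is chosen purely for the example.

    void example_vu_add(CRYPTO_Type *base)
    {
        uint32_t reg_a = CY_CRYPTO_VU_HW_REG0;
        uint32_t reg_b = CY_CRYPTO_VU_HW_REG1;
        uint32_t reg_r = CY_CRYPTO_VU_HW_REG2;

        // Allocate three 256-bit vector registers in the Crypto memory buffer.
        (void)CY_CRYPTO_VU_ALLOC_MEM(base, reg_a, 256u);
        (void)CY_CRYPTO_VU_ALLOC_MEM(base, reg_b, 256u);
        (void)CY_CRYPTO_VU_ALLOC_MEM(base, reg_r, 256u);

        CY_CRYPTO_VU_SET_TO_ONE(base, reg_a);          // reg_a = 1
        CY_CRYPTO_VU_SET_TO_ONE(base, reg_b);          // reg_b = 1
        CY_CRYPTO_VU_ADD(base, reg_r, reg_a, reg_b);   // reg_r = reg_a + reg_b

        // Release the allocated registers.
        CY_CRYPTO_VU_FREE_MEM(base, CY_CRYPTO_VU_REG_BIT(reg_a) |
                                    CY_CRYPTO_VU_REG_BIT(reg_b) |
                                    CY_CRYPTO_VU_REG_BIT(reg_r));
    }
*/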

#endif /* #if (CPUSS_CRYPTO_VU == 1) */

CY_MISRA_BLOCK_END('MISRA C-2012 Rule 14.3')

#if defined(__cplusplus)
}
#endif

#endif /* CY_IP_MXCRYPTO */


#endif /* #if !defined (CY_CRYPTO_CORE_HW_VU_H) */