1 /*
2 * Copyright (c) 2021-2024, The TrustedFirmware-M Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 *
6 */
7
8 #include "cc3xx_aes.h"
9
10 #include "cc3xx_dev.h"
11 #include "cc3xx_dma.h"
12 #include "cc3xx_hash.h"
13 #include "cc3xx_lcs.h"
14 #include "cc3xx_engine_state.h"
15 #include "cc3xx_endian_helpers.h"
16 #include "cc3xx_stdlib.h"
17
18 #include <assert.h>
19 #include <stdbool.h>
20 #include <stdint.h>
21
22 #include "fatal_error.h"
23
24 #ifdef CC3XX_CONFIG_AES_EXTERNAL_KEY_LOADER
25 #include "cc3xx_aes_external_key_loader.h"
26 #endif /* CC3XX_CONFIG_AES_EXTERNAL_KEY_LOADER */
27
28 struct cc3xx_aes_state_t aes_state;
29
/* Translate a cc3xx_aes_keysize_t enum value into a key length in bytes.
 * The enum steps in 64-bit (8-byte) increments above the AES-128 baseline.
 */
static inline size_t get_key_size_bytes(cc3xx_aes_keysize_t key_size)
{
    return 16U + (8U * (size_t)key_size);
}
34
35 /* We don't offer CBC_MAC as a standalone mode, so define it here instead of in
36 * the header
37 */
38 #define CC3XX_AES_MODE_CBC_MAC 0b0011U
39
40 #ifndef CC3XX_CONFIG_AES_EXTERNAL_KEY_LOADER
check_key_lock(cc3xx_aes_key_id_t key_id)41 static cc3xx_err_t check_key_lock(cc3xx_aes_key_id_t key_id)
42 {
43 cc3xx_err_t err = CC3XX_ERR_SUCCESS;
44 cc3xx_lcs_t lcs;
45
46 switch (key_id) {
47 case CC3XX_AES_KEY_ID_HUK:
48 break;
49 case CC3XX_AES_KEY_ID_KRTL:
50 err = cc3xx_lowlevel_lcs_get(&lcs);
51 if (err != CC3XX_ERR_SUCCESS) {
52 return err;
53 }
54 /* The RTL key is only valid in certain states */
55 if (! (lcs == (cc3xx_lcs_cm | cc3xx_lcs_dm))) {
56 FATAL_ERR(CC3XX_ERR_INVALID_LCS);
57 return CC3XX_ERR_INVALID_LCS;
58 }
59 break;
60 case CC3XX_AES_KEY_ID_KCP:
61 if (P_CC3XX->ao.host_ao_lock_bits & (0x1U << 3)) {
62 FATAL_ERR(CC3XX_ERR_INVALID_STATE);
63 return CC3XX_ERR_INVALID_STATE;
64 }
65 break;
66 case CC3XX_AES_KEY_ID_KCE:
67 if (P_CC3XX->ao.host_ao_lock_bits & (0x1U << 4)) {
68 FATAL_ERR(CC3XX_ERR_INVALID_STATE);
69 return CC3XX_ERR_INVALID_STATE;
70 }
71 break;
72 case CC3XX_AES_KEY_ID_KPICV:
73 if (P_CC3XX->ao.host_ao_lock_bits & (0x1U << 1)) {
74 FATAL_ERR(CC3XX_ERR_INVALID_STATE);
75 return CC3XX_ERR_INVALID_STATE;
76 }
77 break;
78 case CC3XX_AES_KEY_ID_KCEICV:
79 if (P_CC3XX->ao.host_ao_lock_bits & (0x1U << 2)) {
80 FATAL_ERR(CC3XX_ERR_INVALID_STATE);
81 return CC3XX_ERR_INVALID_STATE;
82 }
83 break;
84 case CC3XX_AES_KEY_ID_USER_KEY:
85 break;
86 default:
87 return CC3XX_ERR_SUCCESS;
88 }
89
90 return CC3XX_ERR_SUCCESS;
91 }
92
/**
 * @brief Load an AES key into the hardware key registers.
 *
 * @param key_id   Hardware key selector, or CC3XX_AES_KEY_ID_USER_KEY when
 *                 the key material is supplied via @p key.
 * @param key      User key material (only read for USER_KEY).
 * @param key_size Key size enum value.
 * @param is_tun1  true to load the tunnel-1 key slot (only valid with CCM
 *                 tunnelling compiled in), false for the key-0 slot.
 *
 * @return CC3XX_ERR_SUCCESS on success, an error code otherwise.
 */
static cc3xx_err_t set_key(cc3xx_aes_key_id_t key_id, const uint32_t *key,
                           cc3xx_aes_keysize_t key_size, bool is_tun1)
{
    cc3xx_err_t err = CC3XX_ERR_SUCCESS;
    volatile uint32_t *hw_key_buf_ptr;

#if !defined(CC3XX_CONFIG_AES_CCM_ENABLE) || !defined(CC3XX_CONFIG_AES_TUNNELLING_ENABLE)
    /* The tunnel-1 key slot only exists for the CCM tunnelling configuration */
    if (is_tun1) {
        assert(0); /* Wrong programming for this driver configuration */
        FATAL_ERR(CC3XX_ERR_INVALID_STATE);
        return CC3XX_ERR_INVALID_STATE;
    }
#endif /* !defined(CC3XX_CONFIG_AES_CCM_ENABLE) || !defined(CC3XX_CONFIG_AES_TUNNELLING_ENABLE) */

    /* Select which hardware key register bank the user key would go into */
    if (!is_tun1) {
        hw_key_buf_ptr = P_CC3XX->aes.aes_key_0;
    } else {
        hw_key_buf_ptr = P_CC3XX->aes.aes_key_1;
    }

    /* Set key0 size (2-bit field at bit 12 of the control register) */
    if (!is_tun1) {
        P_CC3XX->aes.aes_control &= ~(0b11U << 12);
        P_CC3XX->aes.aes_control |= (key_size & 0b11U) << 12;
    } else {
        /* Set key1 size (2-bit field at bit 14) */
        P_CC3XX->aes.aes_control &= ~(0b11U << 14);
        P_CC3XX->aes.aes_control |= (key_size & 0b11U) << 14;
    }

    if (key_id != CC3XX_AES_KEY_ID_USER_KEY) {
        /* Check if the HOST_FATAL_ERROR mode is enabled */
        if (P_CC3XX->ao.host_ao_lock_bits & 0x1U) {
            FATAL_ERR(CC3XX_ERR_INVALID_STATE);
            return CC3XX_ERR_INVALID_STATE;
        }

        /* Check if the key is masked / locked */
        err = check_key_lock(key_id);
        if (err != CC3XX_ERR_SUCCESS) {
            return err;
        }

        /* Select the required key */
        P_CC3XX->host_rgf.host_cryptokey_sel = key_id;

        /* Trigger the load into the key registers */
        if (!is_tun1) {
            P_CC3XX->aes.aes_sk = 0b1U;
        } else {
            P_CC3XX->aes.aes_sk1 = 0b1U;
        }
    } else {
#ifdef CC3XX_CONFIG_DPA_MITIGATIONS_ENABLE
        /* Key length in 32-bit words: 4 words for AES-128 plus 2 per step */
        size_t key_word_size = 4 + (key_size * 2);

        /* DPA-hardened (randomized-order) copy for all but the final word;
         * the final word is then written plainly.
         */
        cc3xx_dpa_hardened_word_copy(hw_key_buf_ptr, key, key_word_size - 1);
        hw_key_buf_ptr[key_word_size - 1] = key[key_word_size - 1];
#else
        /* Plain word-by-word load; longer keys write the extra words */
        hw_key_buf_ptr[0] = key[0];
        hw_key_buf_ptr[1] = key[1];
        hw_key_buf_ptr[2] = key[2];
        hw_key_buf_ptr[3] = key[3];
        if (key_size > CC3XX_AES_KEYSIZE_128) {
            hw_key_buf_ptr[4] = key[4];
            hw_key_buf_ptr[5] = key[5];
        }
        if (key_size > CC3XX_AES_KEYSIZE_192) {
            hw_key_buf_ptr[6] = key[6];
            hw_key_buf_ptr[7] = key[7];
        }
#endif /* CC3XX_CONFIG_DPA_MITIGATIONS_ENABLE */
    }

    return CC3XX_ERR_SUCCESS;
}
169 #endif /* !CC3XX_CONFIG_AES_EXTERNAL_KEY_LOADER */
170
set_iv(const uint32_t * iv)171 static void set_iv(const uint32_t *iv)
172 {
173 P_CC3XX->aes.aes_iv_0[0] = iv[0];
174 P_CC3XX->aes.aes_iv_0[1] = iv[1];
175 P_CC3XX->aes.aes_iv_0[2] = iv[2];
176 P_CC3XX->aes.aes_iv_0[3] = iv[3];
177 }
178
set_ctr(const uint32_t * ctr)179 static void set_ctr(const uint32_t *ctr)
180 {
181 P_CC3XX->aes.aes_ctr_0[0] = ctr[0];
182 P_CC3XX->aes.aes_ctr_0[1] = ctr[1];
183 P_CC3XX->aes.aes_ctr_0[2] = ctr[2];
184 P_CC3XX->aes.aes_ctr_0[3] = ctr[3];
185 }
186
get_iv(uint32_t * iv)187 static void get_iv(uint32_t *iv)
188 {
189 iv[0] = P_CC3XX->aes.aes_iv_0[0];
190 iv[1] = P_CC3XX->aes.aes_iv_0[1];
191 iv[2] = P_CC3XX->aes.aes_iv_0[2];
192 iv[3] = P_CC3XX->aes.aes_iv_0[3];
193 }
194
get_ctr(uint32_t * ctr)195 static void get_ctr(uint32_t *ctr)
196 {
197 ctr[0]= P_CC3XX->aes.aes_ctr_0[0];
198 ctr[1]= P_CC3XX->aes.aes_ctr_0[1];
199 ctr[2]= P_CC3XX->aes.aes_ctr_0[2];
200 ctr[3]= P_CC3XX->aes.aes_ctr_0[3];
201 }
202
#ifndef CC3XX_CONFIG_AES_CCM_ENABLE
inline
#endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
/* Program the 3-bit mode field (bits [4:2]) of the AES control register.
 * NOTE(review): not marked inline when CCM is enabled — presumably because
 * the CCM path also calls it at runtime from the tunnelling setup; confirm.
 */
static void set_mode(cc3xx_aes_mode_t mode)
{
    /* Set mode field of AES control register */
    P_CC3XX->aes.aes_control &= ~(0b111U << 2);
    P_CC3XX->aes.aes_control |= (mode & 0b111U) << 2;
}
212
/* Program the 3-bit tunnel-1 mode field (bits [7:5]) of the AES control
 * register, used by the CCM tunnelling configuration.
 */
static inline void set_tun1_mode(cc3xx_aes_mode_t mode)
{
    /* Set mode field of AES control register */
    P_CC3XX->aes.aes_control &= ~(0b111U << 5);
    P_CC3XX->aes.aes_control |= (mode & 0b111U) << 5;
}
219
220 #ifdef CC3XX_CONFIG_AES_GCM_ENABLE
/* Zero the DMA output-byte counter, so that preparatory operations (e.g. the
 * GCM hash-key derivation) are not counted as user-visible output.
 */
static void cc3xx_aes_reset_current_output_size(void)
{
    dma_state.current_bytes_output = 0;
}
225
226 #ifdef CC3XX_CONFIG_AES_GCM_VARIABLE_IV_ENABLE
/**
 * @brief Derive the GCM initial counter (J0) from a non-96-bit IV.
 *
 * Implements J0 = GHASH(IV || pad || 64-bit zero || 64-bit bit-length of IV)
 * by running the IV through the GHASH hardware starting from the zero
 * field-point.
 *
 * @param counter Output: the 4-word initial counter value.
 * @param iv      The IV words.
 * @param iv_len  IV length in bytes.
 */
static void gcm_calc_initial_counter_from_iv(uint32_t *counter,
                                             const uint32_t *iv,
                                             size_t iv_len)
{
    uint32_t __attribute__((__aligned__(8)))
        iv_block_buf[AES_GCM_FIELD_POINT_SIZE / sizeof(uint32_t)] = {0};
    uint32_t zero_iv[AES_CTR_LEN / sizeof(uint32_t)] = {0};

    /* Select HASH-only engine */
    cc3xx_lowlevel_set_engine(CC3XX_ENGINE_HASH);

    /* This calculation is done from the zero field-point */
    P_CC3XX->ghash.ghash_iv_0[0] = zero_iv[0];
    P_CC3XX->ghash.ghash_iv_0[1] = zero_iv[1];
    P_CC3XX->ghash.ghash_iv_0[2] = zero_iv[2];
    P_CC3XX->ghash.ghash_iv_0[3] = zero_iv[3];

    /* Feed IV into the DMA, padding with zeroes. */
    cc3xx_lowlevel_dma_buffered_input_data(iv, iv_len, false);
    cc3xx_lowlevel_dma_flush_buffer(true);

    /* Make up the length block, 8 bytes of zeros and a big-endian 64-bit size
     * of the IV in bits
     */
    memset(iv_block_buf, 0, sizeof(iv_block_buf));
    ((uint64_t *)iv_block_buf)[1] = bswap_64((uint64_t)iv_len * 8);
    cc3xx_lowlevel_dma_buffered_input_data(iv_block_buf, sizeof(iv_block_buf), false);
    cc3xx_lowlevel_dma_flush_buffer(false);

    /* Wait for the GHASH to complete */
    while(P_CC3XX->ghash.ghash_busy) {}

    /* Grab the result out of the GHASH buffer */
    counter[0] = P_CC3XX->ghash.ghash_iv_0[0];
    counter[1] = P_CC3XX->ghash.ghash_iv_0[1];
    counter[2] = P_CC3XX->ghash.ghash_iv_0[2];
    counter[3] = P_CC3XX->ghash.ghash_iv_0[3];
}
265 #endif /* CC3XX_CONFIG_AES_GCM_VARIABLE_IV_ENABLE */
266
/**
 * @brief Set up the GCM hash key, initial counter and GHASH state from an IV.
 *
 * Derives the GHASH subkey H (ECB-encryption of the zero block), computes the
 * initial counter J0 (special-cased for 96-bit IVs), and programs the AES
 * counter with J0 + 1 ready for the payload.
 *
 * @param iv     The IV words.
 * @param iv_len IV length in bytes (12 for the fast path; other lengths
 *               require CC3XX_CONFIG_AES_GCM_VARIABLE_IV_ENABLE).
 */
static void gcm_init_iv(const uint32_t *iv, size_t iv_len)
{
    uint32_t zero_iv[AES_CTR_LEN / sizeof(uint32_t)] = {0};

    /* The hash-key is an ECB-encrypted zero block. GCTR mode with a zero-ctr is
     * the same as ECB
     */
    P_CC3XX->aes.aes_ctr_0[0] = zero_iv[0];
    P_CC3XX->aes.aes_ctr_0[1] = zero_iv[1];
    P_CC3XX->aes.aes_ctr_0[2] = zero_iv[2];
    P_CC3XX->aes.aes_ctr_0[3] = zero_iv[3];

    /* Encrypt the all-zero block to get the hash key */
    cc3xx_lowlevel_dma_set_output(aes_state.ghash_key, sizeof(aes_state.ghash_key));
    cc3xx_lowlevel_dma_buffered_input_data(zero_iv, sizeof(zero_iv), true);
    cc3xx_lowlevel_dma_flush_buffer(false);

    /* This is a preparatory operation so no need to count it in the output size */
    cc3xx_aes_reset_current_output_size();

    /* Set GHASH_INIT and set the key */
    P_CC3XX->ghash.ghash_subkey_0[0] = aes_state.ghash_key[0];
    P_CC3XX->ghash.ghash_subkey_0[1] = aes_state.ghash_key[1];
    P_CC3XX->ghash.ghash_subkey_0[2] = aes_state.ghash_key[2];
    P_CC3XX->ghash.ghash_subkey_0[3] = aes_state.ghash_key[3];
    P_CC3XX->ghash.ghash_init = 0x1;

    /* The 96-bit (12 byte) IV is a special case (IV || 0b0^31 || 0b1) */
    if (iv_len == 12) {
        aes_state.counter_0[0] = iv[0];
        aes_state.counter_0[1] = iv[1];
        aes_state.counter_0[2] = iv[2];
        aes_state.counter_0[3] = bswap_32(0x1U);
    } else if (iv_len > 0) {
#ifdef CC3XX_CONFIG_AES_GCM_VARIABLE_IV_ENABLE
        /* Else GHASH the IV */
        gcm_calc_initial_counter_from_iv(aes_state.counter_0, iv, iv_len);

        /* Set up the GHASH block again so it's ready for the data */
        P_CC3XX->ghash.ghash_init = 0x1;
#else
        /* Non-96-bit IVs are not supported in this configuration */
        assert(false);
#endif /* CC3XX_CONFIG_AES_GCM_VARIABLE_IV_ENABLE */
    }

    /* Set the initial AES counter value to the incremented counter_0 */
    set_ctr(aes_state.counter_0);
    P_CC3XX->aes.aes_ctr_0[3] = bswap_32(bswap_32(aes_state.counter_0[3]) + 1);

    /* Set the GHASH module start point to the zero-point */
    P_CC3XX->ghash.ghash_iv_0[0] = 0;
    P_CC3XX->ghash.ghash_iv_0[1] = 0;
    P_CC3XX->ghash.ghash_iv_0[2] = 0;
    P_CC3XX->ghash.ghash_iv_0[3] = 0;
}
322 #endif /* CC3XX_CONFIG_AES_GCM_ENABLE */
323
324 #ifdef CC3XX_CONFIG_AES_CCM_ENABLE
ccm_init_iv(const uint32_t * iv,size_t iv_len)325 static inline void ccm_init_iv(const uint32_t *iv, size_t iv_len)
326 {
327
328 assert(iv_len >= 7 && iv_len <= 13);
329
330 /* Because CCM requires knowledge of the data length and the tag length
331 * before we can construct an IV, we defer the actual iv construction to
332 * later.
333 */
334 memcpy(aes_state.ccm_initial_iv_buf, iv, iv_len);
335 aes_state.ccm_initial_iv_size = iv_len;
336 }
337 #endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
338
/**
 * @brief (Re)program the AES hardware from the static aes_state.
 *
 * Enables the engine clock, configures DFA mitigations, direction, mode and
 * key(s). Used both on fresh init and when restoring a saved state in
 * cc3xx_lowlevel_aes_set_state().
 *
 * @return CC3XX_ERR_SUCCESS on success, or the error from set_key().
 */
static cc3xx_err_t init_from_state(void)
{
    cc3xx_err_t err;

    /* Enable the aes engine clock */
    P_CC3XX->misc.aes_clk_enable = 0x1U;

    /* Set the crypto engine to the AES engine */
    cc3xx_lowlevel_set_engine(CC3XX_ENGINE_AES);

    /* If tunnelling is disabled, DFA mitigations are controlled by the
     * HOST_FORCE_DFA_ENABLE switch. If tunnelling is enabled, then they are
     * controlled here, and enabled for all non-tunnelling modes.
     */
#if defined(CC3XX_CONFIG_DFA_MITIGATIONS_ENABLE) && defined(CC3XX_CONFIG_AES_TUNNELLING_ENABLE)
    assert(!(P_CC3XX->ao.host_ao_lock_bits & (0b1U << 7)));

    /* CCM uses tunnelling, so DFA is switched off for it */
    if (aes_state.mode == CC3XX_AES_MODE_CCM) {
        P_CC3XX->aes.aes_dfa_is_on = 0x0U;
    } else {
        P_CC3XX->aes.aes_dfa_is_on = 0x1U;
    }

#endif /* defined(CC3XX_CONFIG_DFA_MITIGATIONS_ENABLE) && defined(CC3XX_CONFIG_AES_TUNNELLING_ENABLE) */

    /* Clear number of remaining bytes */
    P_CC3XX->aes.aes_remaining_bytes = 0x0U;

    /* Set direction field of AES control register */
    P_CC3XX->aes.aes_control &= ~0b1U;
    P_CC3XX->aes.aes_control |= (aes_state.direction & 0b1U);

#ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
    if (aes_state.mode == CC3XX_AES_MODE_CMAC) {
        /* Reset to encrypt direction. Decryption is undefined in MAC modes.
         * This must be done before the key is set.
         */
        P_CC3XX->aes.aes_control &= ~0b1U;
    }
#endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */

#ifdef CC3XX_CONFIG_AES_CCM_ENABLE
    if (aes_state.mode == CC3XX_AES_MODE_CCM) {

        /* Reset to encrypt direction. Decryption is undefined in MAC modes.
         * This must be done before the key is set.
         */
        P_CC3XX->aes.aes_control &= ~0b1U;

#ifdef CC3XX_CONFIG_AES_TUNNELLING_ENABLE
        /* Set TUN1 key to same key as TUN0 */
        err = set_key(aes_state.key_id,
                      aes_state.state_contains_key ? aes_state.key_buf : NULL,
                      aes_state.key_size, true);
        if (err != CC3XX_ERR_SUCCESS) {
            return err;
        }
#endif /* CC3XX_CONFIG_AES_TUNNELLING_ENABLE */
    }
#endif /* CC3XX_CONFIG_AES_CCM_ENABLE */

    /* Set the mode field */
    set_mode(aes_state.mode);

    /* Clear mode_is_cbc_cts field of control register */
    P_CC3XX->aes.aes_control &= ~(0b1U << 1);

    /* Load the ordinary (key-0) key */
    err = set_key(aes_state.key_id,
                  aes_state.state_contains_key ? aes_state.key_buf : NULL,
                  aes_state.key_size, false);
    if (err != CC3XX_ERR_SUCCESS) {
        return err;
    }

#ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
    if (aes_state.mode == CC3XX_AES_MODE_CMAC) {
        /* Kick the CC to derive K1 and K2 */
        P_CC3XX->aes.aes_cmac_init = 0b1U;
    }
#endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */

#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    if (aes_state.mode == CC3XX_AES_MODE_GCM) {
        /* Set mode_is_cbc_cts field of control register */
        P_CC3XX->aes.aes_control |= (0b1U) << 1;

        /* Set up the GHASH block. First enable hash clock */
        P_CC3XX->misc.hash_clk_enable = 0x1;

        /* Set hash to the GHASH module */
        P_CC3XX->hash.hash_sel_aes_mac = 0b10U;
    }
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */

    return CC3XX_ERR_SUCCESS;
}
435
/**
 * @brief Start a new AES operation.
 *
 * Resets the driver state, programs the hardware for the requested
 * direction/mode/key, and performs mode-specific IV setup.
 *
 * @param direction Encrypt or decrypt.
 * @param mode      Block-cipher mode of operation.
 * @param key_id    Hardware key selector, or CC3XX_AES_KEY_ID_USER_KEY.
 * @param key       User key material (word-aligned), or NULL for a HW key.
 * @param key_size  Key size enum value.
 * @param iv        IV/nonce words (word-aligned); unused for ECB/CMAC.
 * @param iv_len    IV length in bytes (mode-specific constraints asserted).
 *
 * @return CC3XX_ERR_SUCCESS on success, an error code otherwise.
 */
cc3xx_err_t cc3xx_lowlevel_aes_init(
    cc3xx_aes_direction_t direction,
    cc3xx_aes_mode_t mode, cc3xx_aes_key_id_t key_id,
    const uint32_t *key, cc3xx_aes_keysize_t key_size,
    const uint32_t *iv, size_t iv_len)
{
    cc3xx_err_t err;

    /* Check alignment */
    assert(((uintptr_t)key & 0b11) == 0);
    assert(((uintptr_t)iv & 0b11) == 0);

    /* Check larger keys are supported */
    assert(P_CC3XX->host_rgf.host_boot & (1 << 28)); /* SUPPORT_256_192_KEY_LOCAL */
    assert(P_CC3XX->aes.aes_hw_flags & 1); /* SUPPORT_256_192_KEY */

#ifdef CC3XX_CONFIG_DFA_MITIGATIONS_ENABLE
    /* Check if the DFA alarm is tripped, if applicable. This disables the AES
     * block, so we have to return an error.
     */
    if (P_CC3XX->aes.aes_hw_flags & (0x1 << 12)
        && P_CC3XX->aes.aes_dfa_err_status) {
        FATAL_ERR(CC3XX_ERR_DFA_VIOLATION);
        return CC3XX_ERR_DFA_VIOLATION;
    }
#endif /* CC3XX_CONFIG_DFA_MITIGATIONS_ENABLE */

    /* Get a clean starting state */
    cc3xx_lowlevel_aes_uninit();

    aes_state.mode = mode;
    aes_state.direction = direction;

    /* Keep a copy of the user key so the operation can be saved/restored */
    aes_state.key_id = key_id;
    aes_state.key_size = key_size;
    if (key != NULL) {
        aes_state.state_contains_key = true;
#ifdef CC3XX_CONFIG_DPA_MITIGATIONS_ENABLE
        cc3xx_dpa_hardened_word_copy((uint32_t *)aes_state.key_buf, key,
                                     get_key_size_bytes(key_size) / sizeof(uint32_t));
#else
        memcpy(aes_state.key_buf, key, get_key_size_bytes(key_size));
#endif /* CC3XX_CONFIG_DPA_MITIGATIONS_ENABLE */
    }

    /* AES works on 16-byte blocks */
    cc3xx_lowlevel_dma_set_buffer_size(16);

    err = init_from_state();
    if (err != CC3XX_ERR_SUCCESS) {
        return err;
    }

    /* Mode-specific IV setup */
    switch (mode) {
#ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
    case CC3XX_AES_MODE_CMAC:
        /* No IV to set up for CMAC */
#endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */
#ifdef CC3XX_CONFIG_AES_ECB_ENABLE
    case CC3XX_AES_MODE_ECB:
        /* No IV to set up for ECB */
#endif /* CC3XX_CONFIG_AES_ECB_ENABLE */
        break;
#ifdef CC3XX_CONFIG_AES_CTR_ENABLE
    case CC3XX_AES_MODE_CTR:
        assert(iv_len == 16);
        set_ctr(iv);
        break;
#endif /* CC3XX_CONFIG_AES_CTR_ENABLE */
#ifdef CC3XX_CONFIG_AES_CBC_ENABLE
    case CC3XX_AES_MODE_CBC:
        assert(iv_len == 16);
        set_iv(iv);
        break;
#endif /* CC3XX_CONFIG_AES_CBC_ENABLE */
#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    case CC3XX_AES_MODE_GCM:
        gcm_init_iv(iv, iv_len);
        break;
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */
#ifdef CC3XX_CONFIG_AES_CCM_ENABLE
    case CC3XX_AES_MODE_CCM:
        ccm_init_iv(iv, iv_len);
        break;
#endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
    default:
        FATAL_ERR(CC3XX_ERR_NOT_IMPLEMENTED);
        return CC3XX_ERR_NOT_IMPLEMENTED;
    }

    return CC3XX_ERR_SUCCESS;
}
527
/**
 * @brief Capture the current AES operation into @p state so it can later be
 *        resumed with cc3xx_lowlevel_aes_set_state().
 *
 * @param state Output: receives the driver state, the hardware IV/CTR and
 *              GHASH/tunnel registers, and the DMA state.
 */
void cc3xx_lowlevel_aes_get_state(struct cc3xx_aes_state_t *state)
{
#ifdef CC3XX_CONFIG_DPA_MITIGATIONS_ENABLE
    /* NOTE(review): the plain memcpy below already copies key_buf before the
     * hardened copy rewrites it — confirm whether the memcpy should exclude
     * the key material.
     */
    memcpy(state, &aes_state, sizeof(*state));
    cc3xx_dpa_hardened_word_copy(state->key_buf,
                                 aes_state.key_buf,
                                 sizeof(state->key_buf) / sizeof(uint32_t));
#else
    memcpy(state, &aes_state, sizeof(*state));
#endif /* CC3XX_CONFIG_DPA_MITIGATIONS_ENABLE */

    /* Capture the hardware IV / counter registers */
    get_iv(state->iv);
    get_ctr(state->ctr);

#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    /* Capture the current GHASH accumulator value */
    state->gcm_field_point[0] = P_CC3XX->ghash.ghash_iv_0[0];
    state->gcm_field_point[1] = P_CC3XX->ghash.ghash_iv_0[1];
    state->gcm_field_point[2] = P_CC3XX->ghash.ghash_iv_0[2];
    state->gcm_field_point[3] = P_CC3XX->ghash.ghash_iv_0[3];
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */

#if defined(CC3XX_CONFIG_AES_CCM_ENABLE) && defined(CC3XX_CONFIG_AES_TUNNELLING_ENABLE)
    /* Capture the tunnel-1 (CBC-MAC) IV used by CCM tunnelling */
    state->tun1_iv[0] = P_CC3XX->aes.aes_iv_1[0];
    state->tun1_iv[1] = P_CC3XX->aes.aes_iv_1[1];
    state->tun1_iv[2] = P_CC3XX->aes.aes_iv_1[2];
    state->tun1_iv[3] = P_CC3XX->aes.aes_iv_1[3];
#endif /* defined(CC3XX_CONFIG_AES_CCM_ENABLE) && defined(CC3XX_CONFIG_AES_TUNNELLING_ENABLE) */

    memcpy(&state->dma_state, &dma_state, sizeof(state->dma_state));
}
559
/**
 * @brief Restore a previously captured AES operation.
 *
 * Reprograms the hardware from @p state and restores the IV/CTR, GHASH and
 * tunnel registers plus the DMA state.
 *
 * @param state State captured by cc3xx_lowlevel_aes_get_state().
 *
 * @return CC3XX_ERR_SUCCESS on success, or the error from init_from_state().
 */
cc3xx_err_t cc3xx_lowlevel_aes_set_state(const struct cc3xx_aes_state_t *state)
{
    cc3xx_err_t err;

#ifdef CC3XX_CONFIG_DPA_MITIGATIONS_ENABLE
    /* NOTE(review): the plain memcpy below already copies key_buf before the
     * hardened copy rewrites it — confirm whether the memcpy should exclude
     * the key material.
     */
    memcpy(&aes_state, state, sizeof(*state));
    cc3xx_dpa_hardened_word_copy(aes_state.key_buf,
                                 state->key_buf,
                                 sizeof(state->key_buf) / sizeof(uint32_t));
#else
    memcpy(&aes_state, state, sizeof(*state));
#endif /* CC3XX_CONFIG_DPA_MITIGATIONS_ENABLE */

    err = init_from_state();
    if (err != CC3XX_ERR_SUCCESS) {
        return err;
    }

    /* Restore the hardware IV / counter registers */
    set_iv(state->iv);
    set_ctr(state->ctr);
#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    /* Restore the GHASH accumulator and subkey */
    P_CC3XX->ghash.ghash_iv_0[0] = state->gcm_field_point[0];
    P_CC3XX->ghash.ghash_iv_0[1] = state->gcm_field_point[1];
    P_CC3XX->ghash.ghash_iv_0[2] = state->gcm_field_point[2];
    P_CC3XX->ghash.ghash_iv_0[3] = state->gcm_field_point[3];

    P_CC3XX->ghash.ghash_subkey_0[0] = state->ghash_key[0];
    P_CC3XX->ghash.ghash_subkey_0[1] = state->ghash_key[1];
    P_CC3XX->ghash.ghash_subkey_0[2] = state->ghash_key[2];
    P_CC3XX->ghash.ghash_subkey_0[3] = state->ghash_key[3];
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */

#if defined(CC3XX_CONFIG_AES_CCM_ENABLE) && defined(CC3XX_CONFIG_AES_TUNNELLING_ENABLE)
    /* Restore the tunnel-1 (CBC-MAC) IV used by CCM tunnelling */
    P_CC3XX->aes.aes_iv_1[0] = state->tun1_iv[0];
    P_CC3XX->aes.aes_iv_1[1] = state->tun1_iv[1];
    P_CC3XX->aes.aes_iv_1[2] = state->tun1_iv[2];
    P_CC3XX->aes.aes_iv_1[3] = state->tun1_iv[3];
#endif /* defined(CC3XX_CONFIG_AES_CCM_ENABLE) && defined(CC3XX_CONFIG_AES_TUNNELLING_ENABLE) */

    memcpy(&dma_state, &state->dma_state, sizeof(dma_state));

    return CC3XX_ERR_SUCCESS;
}
604
/**
 * @brief Set the buffer that subsequent AES output will be DMAed into.
 *
 * @param out     Output buffer.
 * @param out_len Size of @p out in bytes.
 */
void cc3xx_lowlevel_aes_set_output_buffer(uint8_t *out, size_t out_len)
{
    cc3xx_lowlevel_dma_set_output(out, out_len);
}
609
/**
 * @brief Set the authentication tag length for AEAD/MAC modes.
 *
 * @param tag_len Tag length in bytes; mode-specific validity is asserted.
 *                A no-op for non-authenticated modes.
 */
void cc3xx_lowlevel_aes_set_tag_len(uint32_t tag_len)
{
    /* This is only needed if there is an AEAD/MAC mode enabled */
#if defined(CC3XX_CONFIG_AES_CCM_ENABLE) \
    || defined (CC3XX_CONFIG_AES_GCM_ENABLE) \
    || defined (CC3XX_CONFIG_AES_CMAC_ENABLE)
    aes_state.aes_tag_len = tag_len;

    switch (aes_state.mode) {
#ifdef CC3XX_CONFIG_AES_CCM_ENABLE
    case CC3XX_AES_MODE_CCM:
        /* NIST SP800-38C recommends 8 as a lower bound. IEEE 802.15 specifies
         * that 0, 4, 6, 8, 10, 12, 14, 16 are valid for CCM*.
         */
        assert(tag_len <= 16 && !(tag_len & 0b1) && tag_len != 2);
        break;
#endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    case CC3XX_AES_MODE_GCM:
        /* NIST SP800-38D recommends 12 as a lower bound. */
        assert(tag_len >= 12 && tag_len <= 16);
        break;
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */
#ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
    case CC3XX_AES_MODE_CMAC:
        /* NIST SP800-38B recommends 8 as a lower bound. */
        assert(tag_len >= 8 && tag_len <= 16);
        break;
#endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */
    default:
        break;
    }
#endif
}
644
/**
 * @brief Record the total plaintext/ciphertext and AAD lengths up front.
 *
 * Only CCM actually needs these ahead of time, since both lengths are baked
 * into its B0 block; the other modes program aes_remaining_bytes just before
 * the final buffer flush instead.
 *
 * @param to_crypt_len Total bytes that will be encrypted/decrypted.
 * @param to_auth_len  Total bytes of additional authenticated data.
 */
void cc3xx_lowlevel_aes_set_data_len(uint32_t to_crypt_len, uint32_t to_auth_len)
{
#if defined(CC3XX_CONFIG_AES_CCM_ENABLE)
    aes_state.aes_to_crypt_len = to_crypt_len;
    aes_state.aes_to_auth_len = to_auth_len;
#endif /* defined(CC3XX_CONFIG_AES_CCM_ENABLE) */
}
656
/**
 * @brief Return how many bytes the DMA has written to the output buffer so
 *        far in the current operation.
 */
size_t cc3xx_lowlevel_aes_get_current_output_size(void)
{
    return dma_state.current_bytes_output;
}
661
662 #ifdef CC3XX_CONFIG_AES_CCM_ENABLE
/**
 * @brief Construct the CCM B0 block and initial counter (SP800-38C A.2/A.3).
 *
 * Feeds the B0 block into the CBC-MAC, then derives and programs the CTR
 * counter from the same flags/nonce layout with the length field zeroed.
 *
 * @param from_auth true when called from the AAD path, which sets the Adata
 *                  flag bit in B0.
 */
static void ccm_calc_iv(bool from_auth)
{
    uint8_t __attribute__((__aligned__(4))) b0_block[AES_BLOCK_SIZE] = {0};
    uint64_t crypt_length_be = bswap_64((uint64_t)aes_state.aes_to_crypt_len);

    /* The maximum size of the counter is determined by the size of the IV
     * (since they must both fit into one block (with a byte of padding).
     * Shorter IVs allow more data to be processed by the cipher before the
     * counter overflows (which is unacceptable).
     */
    uint32_t q = 15 - aes_state.ccm_initial_iv_size;

    /* Store q-1 & 0b111 where q=(15 - ccm_initial_iv_size) in the lowest 3
     * bits. q is the size of Q in bytes. Q is the size of the message in bytes.
     */
    b0_block[0] |= (q - 1) & 0b111;

    /* Store t & 0b111 where t=((ccm_tag_len - 2)/2) in the next 3 bits. Tag
     * lengths under 4 are not acceptable, but CCM* allows a special-case tag
     * length of 0 which is encoded as 0b000. The case where aes_tag_len is 2 is
     * not checked here, but must be by the caller. */
    if (aes_state.aes_tag_len >= 4) {
        b0_block[0] |= (((aes_state.aes_tag_len - 2) / 2) & 0b111) << 3;
    }

    /* Set the AAD field if this was called from update_authed_data (this is why
     * we had to defer this calculation).
     */
    if (from_auth) {
        b0_block[0] |= 0b1 << 6;
    }

    /* Construct the b0 block using the flags byte, the IV, and the length of
     * the data to be encrypted.
     */
    memcpy(b0_block + 1, aes_state.ccm_initial_iv_buf,
           aes_state.ccm_initial_iv_size);
    memcpy(b0_block + 1 + aes_state.ccm_initial_iv_size,
           ((uint8_t *)&crypt_length_be) + sizeof(crypt_length_be) - q, q);

    /* Input the B0 block into the CBC-MAC */
    cc3xx_lowlevel_dma_buffered_input_data(b0_block, sizeof(b0_block), false);

    /* The initial counter value is the same construction as b0, except that the
     * ciphertext length is set to 0 and used at the counter (a neat way to
     * avoid the iv and the ctr ever having the same value). We also need to
     * zero some of the field of the flags byte.
     */
    memset(b0_block + 1 + aes_state.ccm_initial_iv_size, 0, q);
    b0_block[0] &= 0b111;

    /* Save the b0 block for encrypting the last block (similarly to GCM) */
    memcpy(aes_state.counter_0, b0_block, sizeof(aes_state.counter_0));

    /* Increment and set the counter */
    b0_block[15] = 0x1;
    set_ctr((uint32_t *) b0_block);
}
721 #endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
722
/**
 * @brief Select the crypto engine used for additional-authenticated-data
 *        input, based on the current mode.
 *
 * GCM hashes AAD through the GHASH engine; CMAC and CCM feed it through the
 * AES engine (CBC-MAC). For any other mode this function returns without
 * touching @p write_output.
 *
 * @param write_output Set to false when the engine is configured, since AAD
 *                     never produces DMA output.
 */
static void configure_engine_for_authed_data(bool *write_output)
{
    switch (aes_state.mode) {
#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    case CC3XX_AES_MODE_GCM:
        cc3xx_lowlevel_set_engine(CC3XX_ENGINE_HASH);
        break;
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */
#ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
    case CC3XX_AES_MODE_CMAC:
        /* fallthrough: MAC input goes through the AES engine */
#endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */
#if defined(CC3XX_CONFIG_AES_CCM_ENABLE)
    case CC3XX_AES_MODE_CCM:
#endif /* defined(CC3XX_CONFIG_AES_CCM_ENABLE) */
        cc3xx_lowlevel_set_engine(CC3XX_ENGINE_AES);
        break;
    default:
        return;
    }

    *write_output = false;
}
745
746 #ifdef CC3XX_CONFIG_AES_CCM_ENABLE
ccm_input_auth_length(void)747 static size_t ccm_input_auth_length(void)
748 {
749 uint8_t auth_length_buf[6];
750 uint64_t auth_length_be = bswap_64((uint64_t)aes_state.aes_to_auth_len);
751 size_t auth_length_byte_length;
752
753 /* 65280 = 2^16 - 2^8 */
754 if (aes_state.aes_to_auth_len < 65280) {
755 memcpy(auth_length_buf,
756 ((uint8_t *)&auth_length_be) + sizeof(auth_length_be) - 2, 2);
757 auth_length_byte_length = 2;
758 } else {
759 auth_length_buf[0] = 0xFF;
760 auth_length_buf[1] = 0xFE;
761 memcpy(auth_length_buf,
762 ((uint8_t *)&auth_length_be) + sizeof(auth_length_be) - 4, 4);
763 auth_length_byte_length = 6;
764 }
765
766 cc3xx_lowlevel_dma_buffered_input_data(auth_length_buf, auth_length_byte_length, false);
767 return auth_length_byte_length;
768 }
769 #endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
770
/**
 * @brief Input additional authenticated data (AAD) into the operation.
 *
 * Only meaningful for GCM, CMAC and CCM; a no-op for other modes or for
 * zero-length input. For CCM, the first call also triggers the deferred
 * B0-block/counter construction and the AAD length encoding.
 *
 * @param in     AAD bytes.
 * @param in_len Number of AAD bytes.
 */
void cc3xx_lowlevel_aes_update_authed_data(const uint8_t* in, size_t in_len)
{
    bool write_output;

    if (in_len == 0) {
        return;
    }

    switch (aes_state.mode) {
#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    case CC3XX_AES_MODE_GCM:
        break;
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */
#ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
    case CC3XX_AES_MODE_CMAC:
        /* Keep the hardware's remaining-bytes count ahead of the data fed in,
         * padded by one block.
         */
        P_CC3XX->aes.aes_remaining_bytes = in_len + AES_BLOCK_SIZE;
        break;
#endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */
#if defined(CC3XX_CONFIG_AES_CCM_ENABLE)
    case CC3XX_AES_MODE_CCM:
        /* First AAD input: build B0/counter and input the AAD length field */
        if (aes_state.authed_length == 0) {
            ccm_calc_iv(true);
            aes_state.authed_length += ccm_input_auth_length();
        }
        break;
#endif /* defined(CC3XX_CONFIG_AES_CCM_ENABLE) */
    default:
        return;
    }

    configure_engine_for_authed_data(&write_output);

    aes_state.authed_length += in_len;
    cc3xx_lowlevel_dma_buffered_input_data(in, in_len, write_output);
}
806
/**
 * @brief Select and configure the crypto engine path for encrypted/decrypted
 *        payload data, based on the current mode and direction.
 *
 * For GCM, routes data through AES and GHASH in the order required by the
 * direction. For CCM with tunnelling, switches tunnel 0 to CTR and tunnel 1
 * to CBC-MAC and programs the tunnelling control bits.
 *
 * @param write_output Set to true when the payload produces DMA output,
 *                     false for MAC-only processing. Left untouched for CMAC
 *                     (which returns early).
 */
static void configure_engine_for_crypted_data(bool *write_output)
{
    switch (aes_state.mode) {
#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    case CC3XX_AES_MODE_GCM:
        /* Encrypt: ciphertext feeds GHASH; decrypt: input feeds both at once */
        if (aes_state.direction == CC3XX_AES_DIRECTION_ENCRYPT) {
            cc3xx_lowlevel_set_engine(CC3XX_ENGINE_AES_TO_HASH_AND_DOUT);
        } else {
            cc3xx_lowlevel_set_engine(CC3XX_ENGINE_AES_AND_HASH);
        }
        break;
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */
#ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
    case CC3XX_AES_MODE_CMAC:
        /* CMAC has no payload path; nothing to configure */
        return;
#endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */
#ifdef CC3XX_CONFIG_AES_CCM_ENABLE
    case CC3XX_AES_MODE_CCM:
#endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
#ifdef CC3XX_CONFIG_AES_TUNNELLING_ENABLE
        set_mode(CC3XX_AES_MODE_CTR);
        set_tun1_mode(CC3XX_AES_MODE_CBC_MAC);
        if (aes_state.direction == CC3XX_AES_DIRECTION_ENCRYPT) {
            /* Set AES_TUN_B1_USES_PADDED_DATA_IN, AES_TUNNEL0_ENCRYPT and
             * AES_OUTPUT_MID_TUNNEL_DATA */
            P_CC3XX->aes.aes_control |= 0b111 << 23;
        } else {
            /* Set AES_OUTPUT_MID_TUNNEL_DATA and AES_TUNNEL_B1_PAD_EN */
            P_CC3XX->aes.aes_control |= 0b11 << 25;
            /* Unset AES_TUNNEL0_ENCRYPT */
            P_CC3XX->aes.aes_control &= ~(0b1 << 24);
        }

        /* Set AES_TUNNEL_IS_ON */
        P_CC3XX->aes.aes_control |= 0B1U << 10;

        cc3xx_lowlevel_set_engine(CC3XX_ENGINE_AES);
#else
        /* Without tunnelling, we just perform CBC_MAC */
        *write_output = false;
        return;
#endif /* CC3XX_CONFIG_AES_TUNNELLING_ENABLE */
        break;
    default:
        cc3xx_lowlevel_set_engine(CC3XX_ENGINE_AES);
    }

    *write_output = true;
}
856
/**
 * @brief Input payload data to be encrypted or decrypted.
 *
 * Handles the transition from the AAD phase to the payload phase (flushing
 * the AAD buffer, and for CCM building the deferred IV and copying the
 * CBC-MAC state into the tunnel-1 IV), then DMAs the data through the engine.
 *
 * @param in     Input bytes.
 * @param in_len Number of input bytes.
 *
 * @return CC3XX_ERR_SUCCESS on success, or a DMA error code.
 */
cc3xx_err_t cc3xx_lowlevel_aes_update(const uint8_t* in, size_t in_len)
{
    cc3xx_err_t err;
    bool write_output;

    /* MAC modes have no concept of encryption/decryption
     * so cc3xx_lowlevel_aes_update is a no-op.
     */
    switch (aes_state.mode) {
#ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
    case CC3XX_AES_MODE_CMAC:
#endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */
        return CC3XX_ERR_SUCCESS;
    default:
        break;
    }

    /* If there is auth data input, then perform a zero-pad and flush before we
     * change how the engine is set up.
     */
    if (aes_state.crypted_length == 0 && aes_state.authed_length != 0) {
        configure_engine_for_authed_data(&write_output);
        cc3xx_lowlevel_dma_flush_buffer(true);
    }

#if defined(CC3XX_CONFIG_AES_CCM_ENABLE)
    if (aes_state.mode == CC3XX_AES_MODE_CCM) {
        if (aes_state.authed_length == 0 && aes_state.crypted_length == 0) {
            /* If the IV was not calculated already by update_authed_data(),
             * then we perform the IV calculation now, but set the argument to
             * indicate that there is no auth data in this operation.
             */
            ccm_calc_iv(false);
            cc3xx_lowlevel_dma_flush_buffer(false);
        }

#ifdef CC3XX_CONFIG_AES_TUNNELLING_ENABLE
        /* For the authed data, CCM uses non-tunnelling CBC_MAC mode. Now we're
         * switching to tunnelling for the crypted data, we need to copy the
         * current state of the CBC_MAC to the tunnel 1 IV, since tunnel 0 will
         * now be used for the CTR mode operation
         */
        if (aes_state.crypted_length == 0) {
            P_CC3XX->aes.aes_iv_1[0] = P_CC3XX->aes.aes_iv_0[0];
            P_CC3XX->aes.aes_iv_1[1] = P_CC3XX->aes.aes_iv_0[1];
            P_CC3XX->aes.aes_iv_1[2] = P_CC3XX->aes.aes_iv_0[2];
            P_CC3XX->aes.aes_iv_1[3] = P_CC3XX->aes.aes_iv_0[3];
        }
#endif /* CC3XX_CONFIG_AES_TUNNELLING_ENABLE */
    }
#endif /* defined(CC3XX_CONFIG_AES_CCM_ENABLE) */

    configure_engine_for_crypted_data(&write_output);

    aes_state.crypted_length += in_len;
    err = cc3xx_lowlevel_dma_buffered_input_data(in, in_len, write_output);
    if (err != CC3XX_ERR_SUCCESS) {
        return err;
    }

    /* Flush whenever a full block has accumulated */
    if (dma_state.block_buf_size_in_use == AES_BLOCK_SIZE) {
        cc3xx_lowlevel_dma_flush_buffer(false);
    }

    return CC3XX_ERR_SUCCESS;
}
923
924 #if defined(CC3XX_CONFIG_AES_CCM_ENABLE) \
925 || defined (CC3XX_CONFIG_AES_GCM_ENABLE) \
926 || defined (CC3XX_CONFIG_AES_CMAC_ENABLE)
/* On decrypt, compare the caller-supplied tag against the calculated tag,
 * visiting the tag words in a randomly generated order (DPA countermeasure,
 * so power/timing traces don't reveal which word mismatched). On encrypt,
 * copy the calculated tag out to the caller using a DPA-hardened word copy.
 *
 * NOTE(review): both paths operate on whole 32-bit words with the word count
 * rounded up from aes_tag_len, so for tag lengths that are not a multiple of
 * 4 (CCM permits e.g. 6, 10, 14) the caller's buffer must be word-padded —
 * confirm against callers.
 */
static cc3xx_err_t tag_cmp_or_copy(uint32_t *tag, uint32_t *calculated_tag)
{
    uint32_t idx;
    /* Tag length in 32-bit words, rounded up */
    uint32_t tag_word_size = (aes_state.aes_tag_len + sizeof(uint32_t) - 1) / sizeof(uint32_t);
    uint8_t permutation_buf[tag_word_size];
    bool are_different = 0;

    if (aes_state.direction == CC3XX_AES_DIRECTION_DECRYPT) {
        cc3xx_random_permutation_generate(permutation_buf, tag_word_size);

        /* Accumulate all word differences so the loop always runs to
         * completion regardless of where (or whether) a mismatch occurs.
         */
        for (idx = 0; idx < tag_word_size; idx++) {
            are_different |= tag[permutation_buf[idx]] ^ calculated_tag[permutation_buf[idx]];
        }
    } else {
        cc3xx_dpa_hardened_word_copy(tag, calculated_tag, tag_word_size);
    }

    if (are_different) {
        FATAL_ERR(CC3XX_ERR_INVALID_TAG);
        return CC3XX_ERR_INVALID_TAG;
    } else {
        return CC3XX_ERR_SUCCESS;
    }
}
951 #endif
952
953 #ifdef CC3XX_CONFIG_AES_GCM_ENABLE
/* Complete a GCM operation: feed the 128-bit length block into the GHASH
 * engine, then encrypt the final GHASH state with the saved counter-0 block
 * (CTR mode) to form the tag, and produce/verify it via tag_cmp_or_copy().
 */
static cc3xx_err_t gcm_finish(uint32_t *tag)
{
    uint64_t len_block[2];
    uint32_t final_block[4];
    uint32_t calculated_tag[4];

    /* Create and input the length block into the GHASH engine. GCM measures all
     * lengths in bits
     */
    len_block[0] = bswap_64((uint64_t)aes_state.authed_length * 8);
    len_block[1] = bswap_64((uint64_t)aes_state.crypted_length * 8);

    cc3xx_lowlevel_set_engine(CC3XX_ENGINE_HASH);

    /* Input the length block in GHASH.
     * NOTE(review): the return values of the DMA calls in this function are
     * ignored — confirm failures cannot occur on this path.
     */
    cc3xx_lowlevel_dma_buffered_input_data(len_block, sizeof(len_block), false);
    cc3xx_lowlevel_dma_flush_buffer(false);

    /* Wait for the GHASH to finish */
    while(P_CC3XX->ghash.ghash_busy){}

    /* Set up CTR mode, using the saved counter 0 value */
    set_ctr(aes_state.counter_0);

    /* Encrypt the final GHASH output value */
    final_block[0] = P_CC3XX->ghash.ghash_iv_0[0];
    final_block[1] = P_CC3XX->ghash.ghash_iv_0[1];
    final_block[2] = P_CC3XX->ghash.ghash_iv_0[2];
    final_block[3] = P_CC3XX->ghash.ghash_iv_0[3];

    cc3xx_lowlevel_set_engine(CC3XX_ENGINE_AES);

    /* Route the AES output into calculated_tag and push the GHASH result
     * through the engine.
     */
    cc3xx_lowlevel_dma_set_output(calculated_tag, sizeof(calculated_tag));
    cc3xx_lowlevel_dma_buffered_input_data(final_block, AES_GCM_FIELD_POINT_SIZE,
                                           true);
    cc3xx_lowlevel_dma_flush_buffer(false);

    /* Wait for the AES engine before the tag value is consumed */
    while(P_CC3XX->aes.aes_busy) {}

    return tag_cmp_or_copy(tag, calculated_tag);
}
995 #endif /* CC3XX_CONFIG_AES_GCM_ENABLE */
996
997 #ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
cmac_finish(uint32_t * tag)998 static cc3xx_err_t cmac_finish(uint32_t *tag)
999 {
1000 uint32_t calculated_tag[AES_IV_LEN / sizeof(uint32_t)];
1001
1002 if (aes_state.authed_length == 0) {
1003 /* Special-case for when no data has been input. */
1004 P_CC3XX->aes.aes_cmac_size0_kick = 0b1U;
1005 }
1006
1007 /* The tag is just the final IV */
1008 get_iv(calculated_tag);
1009
1010 return tag_cmp_or_copy(tag, calculated_tag);
1011 }
1012 #endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */
1013
1014 #ifdef CC3XX_CONFIG_AES_CCM_ENABLE
/* Complete a CCM operation: fetch the final CBC-MAC value (from the tunnel 1
 * IV when tunnelling is enabled, otherwise from the regular IV), encrypt it
 * in CTR mode with the saved counter-0 block to form the tag, and
 * produce/verify it via tag_cmp_or_copy().
 */
static cc3xx_err_t ccm_finish(uint32_t *tag)
{
    uint32_t calculated_tag[AES_IV_LEN / sizeof(uint32_t)];

    /* If tunnelling is disabled, CCM mode is CBC_MAC with the special IV
     * calculations. Depending on whether tunnelling modes is enabled or not,
     * the IV that has the final value is different.
     */
#ifdef CC3XX_CONFIG_AES_TUNNELLING_ENABLE
    calculated_tag[0] = P_CC3XX->aes.aes_iv_1[0];
    calculated_tag[1] = P_CC3XX->aes.aes_iv_1[1];
    calculated_tag[2] = P_CC3XX->aes.aes_iv_1[2];
    calculated_tag[3] = P_CC3XX->aes.aes_iv_1[3];
#else
    get_iv(calculated_tag);
#endif /* CC3XX_CONFIG_AES_TUNNELLING_ENABLE */

    /* Finally, encrypt the IV value with the original counter 0 value. */
    set_ctr(aes_state.counter_0);
    set_mode(CC3XX_AES_MODE_CTR);
    /* calculated_tag is encrypted in place: the DMA output buffer aliases
     * the input buffer.
     */
    cc3xx_lowlevel_dma_set_output(calculated_tag, sizeof(calculated_tag));
    cc3xx_lowlevel_dma_buffered_input_data(calculated_tag, sizeof(calculated_tag), true);
    cc3xx_lowlevel_dma_flush_buffer(false);

    return tag_cmp_or_copy(tag, calculated_tag);
}
1041 #endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
1042
cc3xx_lowlevel_aes_finish(uint32_t * tag,size_t * size)1043 cc3xx_err_t cc3xx_lowlevel_aes_finish(uint32_t *tag, size_t *size)
1044 {
1045 cc3xx_err_t err = CC3XX_ERR_SUCCESS;
1046 bool write_output;
1047
1048 /* Check alignment */
1049 assert(((uintptr_t)tag & 0b11) == 0);
1050
1051 /* The DMA buffer doesn't save the engine state when the block buffer was
1052 * created, so we need to configure the engine to the right state before the
1053 * final flush.
1054 */
1055 if (aes_state.crypted_length == 0 && aes_state.authed_length != 0) {
1056 configure_engine_for_authed_data(&write_output);
1057 } else if (aes_state.crypted_length != 0) {
1058 configure_engine_for_crypted_data(&write_output);
1059 }
1060
1061 /* Check how much data is in the DMA block buffer, and set
1062 * aes_remaining_bytes accordingly.
1063 */
1064 P_CC3XX->aes.aes_remaining_bytes = dma_state.block_buf_size_in_use;
1065
1066 /* Set remaining data to the amount of data in the DMA buffer */
1067
1068 /* ECB and CBC modes require padding since they can't have non-block-sized
1069 * ciphertexts. Other modes don't need padding.
1070 */
1071 switch (aes_state.mode) {
1072 #ifdef CC3XX_CONFIG_AES_ECB_ENABLE
1073 case CC3XX_AES_MODE_ECB:
1074 #endif /* CC3XX_CONFIG_AES_ECB_ENABLE */
1075 #ifdef CC3XX_CONFIG_AES_CBC_ENABLE
1076 case CC3XX_AES_MODE_CBC:
1077 #endif /* CC3XX_CONFIG_AES_CBC_ENABLE */
1078 cc3xx_lowlevel_dma_flush_buffer(true);
1079 break;
1080 default:
1081 cc3xx_lowlevel_dma_flush_buffer(false);
1082 break;
1083 }
1084
1085 /* Get the size before any tag is produced */
1086 if (size != NULL) {
1087 *size = cc3xx_lowlevel_aes_get_current_output_size();
1088 }
1089
1090 switch (aes_state.mode) {
1091 #ifdef CC3XX_CONFIG_AES_GCM_ENABLE
1092 case CC3XX_AES_MODE_GCM:
1093 err = gcm_finish(tag);
1094 break;
1095 #endif /* CC3XX_CONFIG_AES_GCM_ENABLE */
1096 #ifdef CC3XX_CONFIG_AES_CMAC_ENABLE
1097 case CC3XX_AES_MODE_CMAC:
1098 err = cmac_finish(tag);
1099 break;
1100 #endif /* CC3XX_CONFIG_AES_CMAC_ENABLE */
1101 #ifdef CC3XX_CONFIG_AES_CCM_ENABLE
1102 case CC3XX_AES_MODE_CCM:
1103 #endif /* CC3XX_CONFIG_AES_CCM_ENABLE */
1104 err = ccm_finish(tag);
1105 break;
1106 default:
1107 break;
1108 }
1109
1110 cc3xx_lowlevel_aes_uninit();
1111
1112 return err;
1113 }
1114
/* Return the AES subsystem to its idle state: scrub the software operation
 * state, zero the hardware IV/counter (and, for GCM, GHASH) registers,
 * release the DMA, detach the crypto engine and gate the AES clock.
 */
void cc3xx_lowlevel_aes_uninit(void)
{
    static const uint32_t zero_block[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};
    /* Clear the software state (mode, direction, lengths, saved counter) */
    memset(&aes_state, 0, sizeof(struct cc3xx_aes_state_t));

    /* Overwrite the hardware IV and counter registers with zeros */
    set_iv(zero_block);
    set_ctr(zero_block);

    cc3xx_lowlevel_dma_uninit();

#ifdef CC3XX_CONFIG_AES_GCM_ENABLE
    /* Clear the GHASH state used for GCM tag calculation */
    cc3xx_lowlevel_hash_uninit();
    P_CC3XX->ghash.ghash_iv_0[0] = 0;
    P_CC3XX->ghash.ghash_iv_0[1] = 0;
    P_CC3XX->ghash.ghash_iv_0[2] = 0;
    P_CC3XX->ghash.ghash_iv_0[3] = 0;
#endif /* CC3XX_CONFIG_AES_GCM_ENABLE */

    /* Reset AES_CTRL */
    P_CC3XX->aes.aes_control = 0x0U;

    /* Set the crypto engine back to the default PASSTHROUGH engine */
    cc3xx_lowlevel_set_engine(CC3XX_ENGINE_NONE);

    /* Disable the AES clock */
    P_CC3XX->misc.aes_clk_enable = 0x0U;
}
1142