1 /*
2 * Copyright (c) 2023 Renesas Electronics Corporation
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <zephyr/kernel.h>
8 #include <zephyr/device.h>
9 #include <zephyr/crypto/crypto.h>
10 #include <zephyr/irq.h>
11 #include <DA1469xAB.h>
12 #include <da1469x_config.h>
13 #include <da1469x_otp.h>
14 #include <system_DA1469x.h>
15 #include <da1469x_pd.h>
16 #include <zephyr/sys/byteorder.h>
17 #include <zephyr/pm/device.h>
18 #include <zephyr/pm/policy.h>
19 #include <zephyr/logging/log.h>
20
21 LOG_MODULE_REGISTER(crypto_smartbond_crypto, CONFIG_CRYPTO_LOG_LEVEL);
22
23 #define DT_DRV_COMPAT renesas_smartbond_crypto
24
25 #define SMARTBOND_IRQN DT_INST_IRQN(0)
26 #define SMARTBOND_IRQ_PRIO DT_INST_IRQ(0, priority)
27
28 #if defined(CONFIG_CRYPTO_ASYNC)
29 #define CRYPTO_HW_CAPS (CAP_RAW_KEY | CAP_SEPARATE_IO_BUFS | CAP_ASYNC_OPS | CAP_NO_IV_PREFIX)
30 #else
31 #define CRYPTO_HW_CAPS (CAP_RAW_KEY | CAP_SEPARATE_IO_BUFS | CAP_SYNC_OPS | CAP_NO_IV_PREFIX)
32 #endif
33
34 #define SWAP32(_w) __REV(_w)
35
36 #define CRYPTO_CTRL_REG_SET(_field, _val) \
37 AES_HASH->CRYPTO_CTRL_REG = \
38 (AES_HASH->CRYPTO_CTRL_REG & ~AES_HASH_CRYPTO_CTRL_REG_ ## _field ## _Msk) | \
39 ((_val) << AES_HASH_CRYPTO_CTRL_REG_ ## _field ## _Pos)
40
41 #define CRYPTO_CTRL_REG_GET(_field) \
42 ((AES_HASH->CRYPTO_CTRL_REG & AES_HASH_CRYPTO_CTRL_REG_ ## _field ## _Msk) >> \
43 AES_HASH_CRYPTO_CTRL_REG_ ## _field ## _Pos)
44
45
/* Per-device runtime state for the single AES/HASH crypto engine instance. */
struct crypto_smartbond_data {
	/*
	 * Semaphore providing mutual exclusion when a crypto session is requested.
	 */
	struct k_sem session_sem;

	/*
	 * Semaphore providing mutual exclusion when a cryptographic task is requested
	 * (a session should already be held at this point).
	 */
	struct k_sem device_sem;
#if defined(CONFIG_CRYPTO_ASYNC)
	/*
	 * User-defined callbacks to be called upon completion of asynchronous
	 * cryptographic operations. Note that the AES and HASH modes can work
	 * complementary to each other.
	 */
	union {
		cipher_completion_cb cipher_user_cb;
		hash_completion_cb hash_user_cb;
	};

	/*
	 * Packet context stored for the duration of a session so that it can be
	 * retrieved from within the crypto engine ISR context.
	 */
	union {
		struct cipher_pkt *cipher_pkt;
		struct hash_pkt *hash_pkt;
	};
#else
	/*
	 * Semaphore used to block for as long as a synchronous cryptographic operation
	 * is in progress.
	 */
	struct k_sem sync_sem;
#endif
};
84
85 /*
86 * Status flag to indicate if the crypto engine resources have been granted. Note that the
87 * device integrates a single crypto engine instance.
88 */
89 static bool in_use;
90
91 static void crypto_smartbond_set_status(bool enable);
92
/*
 * Crypto engine interrupt handler. Clears the engine IRQ and, depending on the
 * build, either invokes the registered user callback (async builds) or releases
 * the thread blocked in the synchronous handlers (sync builds).
 */
static void smartbond_crypto_isr(const void *arg)
{
	struct crypto_smartbond_data *data = ((const struct device *)arg)->data;
	uint32_t status = AES_HASH->CRYPTO_STATUS_REG;

	if (status & AES_HASH_CRYPTO_STATUS_REG_CRYPTO_IRQ_ST_Msk) {
		/* Clear the interrupt source; otherwise the handler fires constantly! */
		AES_HASH->CRYPTO_CLRIRQ_REG = 0x1;

#if defined(CONFIG_CRYPTO_ASYNC)
		/* Determine the selected crypto mode (AES/HASH). */
		if (AES_HASH->CRYPTO_CTRL_REG & AES_HASH_CRYPTO_CTRL_REG_CRYPTO_HASH_SEL_Msk) {
			if (data->hash_user_cb) {
				data->hash_user_cb(data->hash_pkt, status);
			}
		} else {
			if (data->cipher_user_cb) {
				data->cipher_user_cb(data->cipher_pkt, status);
			}
		}
#else
		/* Designate that the requested cryptographic task is finished. */
		k_sem_give(&data->sync_sem);
#endif
	}
}
119
crypto_smartbond_pm_policy_state_lock_get(const struct device * dev)120 static inline void crypto_smartbond_pm_policy_state_lock_get(const struct device *dev)
121 {
122 /*
123 * Prevent the SoC from entering the normal sleep state as PDC does not support
124 * waking up the application core following AES/HASH events.
125 */
126 pm_policy_state_lock_get(PM_STATE_STANDBY, PM_ALL_SUBSTATES);
127 }
128
crypto_smartbond_pm_policy_state_lock_put(const struct device * dev)129 static inline void crypto_smartbond_pm_policy_state_lock_put(const struct device *dev)
130 {
131 /* Allow the SoC to enter the normal sleep state once AES/HASH operations are done. */
132 pm_policy_state_lock_put(PM_STATE_STANDBY, PM_ALL_SUBSTATES);
133 }
134
crypto_smartbond_lock_session(const struct device * dev)135 static bool crypto_smartbond_lock_session(const struct device *dev)
136 {
137 bool lock = false;
138 struct crypto_smartbond_data *data = dev->data;
139
140 k_sem_take(&data->session_sem, K_FOREVER);
141
142 if (!in_use) {
143 in_use = true;
144 /* Prevent sleep as long as a cryptographic session is in place */
145 da1469x_pd_acquire(MCU_PD_DOMAIN_SYS);
146 crypto_smartbond_pm_policy_state_lock_get(dev);
147 crypto_smartbond_set_status(true);
148 lock = true;
149 }
150
151 k_sem_give(&data->session_sem);
152
153 return lock;
154 }
155
crypto_smartbond_unlock_session(const struct device * dev)156 static void crypto_smartbond_unlock_session(const struct device *dev)
157 {
158 struct crypto_smartbond_data *data = dev->data;
159
160 k_sem_take(&data->session_sem, K_FOREVER);
161
162 if (in_use) {
163 in_use = false;
164 crypto_smartbond_set_status(false);
165 crypto_smartbond_pm_policy_state_lock_put(dev);
166 da1469x_pd_release_nowait(MCU_PD_DOMAIN_SYS);
167 }
168
169 k_sem_give(&data->session_sem);
170 }
171
172 /*
173 * Input vector should comply with the following restrictions:
174 *
175 * mode | CRYPTO_MORE_IN = true | CRYPTO_MORE_IN = false
176 * ------------| -----------------------| ----------------------
177 * ECB | multiple of 16 (bytes) | multiple of 16 (bytes)
178 * CBC | multiple of 16 | no restrictions
179 * CTR | multiple of 16 | no restrictions
180 * MD5 | multiple of 8 | no restrictions
181 * SHA_1 | multiple of 8 | no restrictions
182 * SHA_256_224 | multiple of 8 | no restrictions
183 * SHA_256 | multiple of 8 | no restrictions
184 * SHA_384 | multiple of 8 | no restrictions
185 * SHA_512 | multiple of 8 | no restrictions
186 * SHA_512_224 | multiple of 8 | no restrictions
187 * SHA_512_256 | multiple of 8 | no restrictions
188 */
/*
 * Validate the IN fragment length against the mode-specific restrictions of
 * the engine (see the table above): non-final HASH fragments must be 8-byte
 * multiples; non-final AES fragments must be 16-byte multiples; ECB demands
 * 16-byte multiples even for the final fragment.
 */
static int crypto_smartbond_check_in_restrictions(uint16_t in_len)
{
#define CRYPTO_ALG_MD_ECB_MAGIC_0   0x00
#define CRYPTO_ALG_MD_ECB_MAGIC_1   0x01

	bool not_last_in_block = !!(AES_HASH->CRYPTO_CTRL_REG &
					AES_HASH_CRYPTO_CTRL_REG_CRYPTO_MORE_IN_Msk);

	/* HASH mode selected? */
	if (AES_HASH->CRYPTO_CTRL_REG & AES_HASH_CRYPTO_CTRL_REG_CRYPTO_HASH_SEL_Msk) {
		return (not_last_in_block && (in_len & 0x7)) ? -EINVAL : 0;
	}

	/* AES mode: 16-byte multiples are always acceptable. */
	if ((in_len & 0xF) == 0) {
		return 0;
	}

	/* A non-final AES fragment must be a multiple of the block size. */
	if (not_last_in_block) {
		return -EINVAL;
	}

	/* ECB (CRYPTO_ALG_MD = 0x0 or 0x1) never accepts partial blocks. */
	uint32_t crypto_mode = CRYPTO_CTRL_REG_GET(CRYPTO_ALG_MD);

	if (crypto_mode == CRYPTO_ALG_MD_ECB_MAGIC_0 ||
	    crypto_mode == CRYPTO_ALG_MD_ECB_MAGIC_1) {
		return -EINVAL;
	}

	return 0;
}
220
221 /*
222 * The driver model does not define the max. output length. As such, the max supported length
223 * per mode is applied.
224 */
crypto_smartbond_hash_set_out_len(void)225 static int crypto_smartbond_hash_set_out_len(void)
226 {
227 uint32_t hash_algo = (AES_HASH->CRYPTO_CTRL_REG & AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_Msk);
228
229 if (AES_HASH->CRYPTO_CTRL_REG & AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_MD_Msk) {
230 /* 64-bit HASH operations */
231 switch (hash_algo) {
232 case 0x0:
233 /* SHA-384: 0..47 --> 1..48 bytes */
234 CRYPTO_CTRL_REG_SET(CRYPTO_HASH_OUT_LEN, 47);
235 break;
236 case 0x1:
237 /* SHA-512: 0..63 --> 1..64 bytes */
238 CRYPTO_CTRL_REG_SET(CRYPTO_HASH_OUT_LEN, 63);
239 break;
240 case 0x2:
241 /* SHA-512/224: 0..27 --> 1..28 bytes */
242 CRYPTO_CTRL_REG_SET(CRYPTO_HASH_OUT_LEN, 27);
243 break;
244 case 0x3:
245 /* SHA-512/256: 0..31 --> 1..32 bytes */
246 CRYPTO_CTRL_REG_SET(CRYPTO_HASH_OUT_LEN, 31);
247 break;
248 default:
249 break;
250 }
251 } else {
252 /* 32-bit HASH operations */
253 switch (hash_algo) {
254 case 0x0:
255 /* MD5: 0..15 --> 1..16 bytes */
256 CRYPTO_CTRL_REG_SET(CRYPTO_HASH_OUT_LEN, 15);
257 break;
258 case 0x1:
259 /* SHA-1: 0..19 --> 1..20 bytes */
260 CRYPTO_CTRL_REG_SET(CRYPTO_HASH_OUT_LEN, 19);
261 break;
262 case 0x2:
263 /* SHA-256/224: 0..27 --> 1..28 bytes */
264 CRYPTO_CTRL_REG_SET(CRYPTO_HASH_OUT_LEN, 27);
265 break;
266 case 0x3:
267 /* SHA-256: 0..31 --> 1..32 bytes */
268 CRYPTO_CTRL_REG_SET(CRYPTO_HASH_OUT_LEN, 31);
269 break;
270 default:
271 break;
272 }
273 }
274
275 /* Return the OUT size applied. */
276 return CRYPTO_CTRL_REG_GET(CRYPTO_HASH_OUT_LEN) + 1;
277 }
278
crypto_smartbond_swap_word(uint8_t * data)279 static uint32_t crypto_smartbond_swap_word(uint8_t *data)
280 {
281 /* Check word boundaries of given address and if possible accellerate swapping */
282 if ((uint32_t)data & 0x3) {
283 return SWAP32(sys_get_le32(data));
284 } else {
285 return SWAP32(*(uint32_t *)data);
286 }
287 }
288
/*
 * Program the AES key size and load the key material into the engine's key
 * RAM. Keys residing in the OTP user-keys segment are copied via the OTP
 * driver (and rejected when read protection is latched); all other keys are
 * copied word-by-word with byte swapping.
 *
 * Returns 0 on success, -EINVAL on an unsupported key length, -EIO when the
 * key is NULL or the OTP segment is locked.
 */
static int crypto_smartbond_cipher_key_load(uint8_t *key, uint16_t key_len)
{
	if (key == NULL) {
		return -EIO;
	}

	AES_HASH->CRYPTO_CTRL_REG &= ~(AES_HASH_CRYPTO_CTRL_REG_CRYPTO_AES_KEY_SZ_Msk);

	switch (key_len) {
	case 32:
		AES_HASH->CRYPTO_CTRL_REG |=
			(0x2 << AES_HASH_CRYPTO_CTRL_REG_CRYPTO_AES_KEY_SZ_Pos);
		break;
	case 24:
		AES_HASH->CRYPTO_CTRL_REG |=
			(0x1 << AES_HASH_CRYPTO_CTRL_REG_CRYPTO_AES_KEY_SZ_Pos);
		break;
	case 16:
		/* KEY_SZ = 0x0 selects AES-128; already cleared above. */
		break;
	default:
		return -EINVAL;
	}

	/* Key expansion is performed by the crypto engine. */
	AES_HASH->CRYPTO_CTRL_REG |= AES_HASH_CRYPTO_CTRL_REG_CRYPTO_AES_KEXP_Msk;

	/* Check whether the cipher key is located in OTP (user keys segment). */
	if (IS_ADDRESS_USER_DATA_KEYS_SEGMENT((uint32_t)key)) {

		/* The user keys segment is unreadable once the sticky protection bit is set. */
		if (CRG_TOP->SECURE_BOOT_REG & CRG_TOP_SECURE_BOOT_REG_PROT_AES_KEY_READ_Msk) {
			return -EIO;
		}

		uint32_t cell_offset = da1469x_otp_address_to_cell_offset((uint32_t)key);

		da1469x_otp_read(cell_offset,
			(void *)&AES_HASH->CRYPTO_KEYS_START, (uint32_t)key_len);
	} else {
		volatile uint32_t *kmem = &AES_HASH->CRYPTO_KEYS_START;

		/* key_len is guaranteed to be a non-zero multiple of 4 here. */
		for (uint16_t i = 0; i < key_len; i += 4) {
			*(kmem++) = crypto_smartbond_swap_word(key + i);
		}
	}

	return 0;
}
336
crypto_smartbond_cipher_set_mode(enum cipher_mode mode)337 static int crypto_smartbond_cipher_set_mode(enum cipher_mode mode)
338 {
339 /* Select AES mode */
340 AES_HASH->CRYPTO_CTRL_REG &= ~(AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_MD_Msk |
341 AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_Msk |
342 AES_HASH_CRYPTO_CTRL_REG_CRYPTO_HASH_SEL_Msk);
343 switch (mode) {
344 case CRYPTO_CIPHER_MODE_ECB:
345 /* Already done; CRYPTO_ALG_MD = 0x0 or 0x1 defines ECB. */
346 break;
347 case CRYPTO_CIPHER_MODE_CTR:
348 AES_HASH->CRYPTO_CTRL_REG |= (0x2 << AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_MD_Pos);
349 break;
350 case CRYPTO_CIPHER_MODE_CBC:
351 AES_HASH->CRYPTO_CTRL_REG |= (0x3 << AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_MD_Pos);
352 break;
353 default:
354 return -EINVAL;
355 }
356
357 return 0;
358 }
359
crypto_smartbond_hash_set_algo(enum hash_algo algo)360 static int crypto_smartbond_hash_set_algo(enum hash_algo algo)
361 {
362 /* Select HASH mode and reset to 32-bit mode */
363 AES_HASH->CRYPTO_CTRL_REG =
364 (AES_HASH->CRYPTO_CTRL_REG & ~(AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_Msk |
365 AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_MD_Msk)) |
366 AES_HASH_CRYPTO_CTRL_REG_CRYPTO_HASH_SEL_Msk;
367
368 switch (algo) {
369 case CRYPTO_HASH_ALGO_SHA224:
370 /* CRYPTO_ALG_MD = 0x0 defines 32-bit operations */
371 AES_HASH->CRYPTO_CTRL_REG |= (0x2 << AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_Pos);
372 break;
373 case CRYPTO_HASH_ALGO_SHA256:
374 /* CRYPTO_ALG_MD = 0x0 defines 32-bit operations */
375 AES_HASH->CRYPTO_CTRL_REG |= (0x3 << AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_Pos);
376 break;
377 case CRYPTO_HASH_ALGO_SHA384:
378 /* CRYPTO_ALG_MD = 0x1 defines 64-bit operations */
379 AES_HASH->CRYPTO_CLRIRQ_REG |= AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_MD_Msk;
380 break;
381 case CRYPTO_HASH_ALGO_SHA512:
382 /* CRYPTO_ALG_MD = 0x1 defines 64-bit operations */
383 AES_HASH->CRYPTO_CTRL_REG |= (AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_MD_Msk |
384 (0x1 << AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ALG_Pos));
385 break;
386 default:
387 return -EINVAL;
388 }
389
390 return 0;
391 }
392
/*
 * Program the engine's fetch/destination addresses and transfer length.
 * The crypto DMA only understands physical (non-remapped) addresses; QSPI
 * Flash and OTP sources are additionally rebased so they are not fetched
 * through the instruction cache / OTP CPU window.
 *
 * A NULL out_buf is valid for fragmented processing: the engine keeps
 * advancing CRYPTO_DEST_ADDR/CRYPTO_FETCH_ADDR on its own while results are
 * written out.
 *
 * Returns 0 on success, -EIO when in_buf is NULL or out_buf is in a memory
 * region the engine cannot write to.
 */
static int crypto_smartbond_set_in_out_buf(uint8_t *in_buf, uint8_t *out_buf, int len)
{
	uint32_t src;

	if (in_buf == NULL) {
		return -EIO;
	}

	src = black_orca_phy_addr((uint32_t)in_buf);

	if (IS_QSPIF_CACHED_ADDRESS(src)) {
		/* Bypass the instruction cache controller to avoid cache misses. */
		src += (MCU_QSPIF_M_BASE - MCU_QSPIF_M_CACHED_BASE);
	} else if (IS_OTP_ADDRESS(src)) {
		/* Peripherals must access OTP through its peripheral address space. */
		src += (MCU_OTP_M_P_BASE - MCU_OTP_M_BASE);
	}

	AES_HASH->CRYPTO_FETCH_ADDR_REG = src;

	if (out_buf != NULL) {
		uint32_t remap_adr0 = CRG_TOP->SYS_CTRL_REG & CRG_TOP_SYS_CTRL_REG_REMAP_ADR0_Msk;
		/*
		 * OUT data can only land in SYSRAM, non-cached remapped SYSRAM or
		 * cached non-remapped SYSRAM.
		 */
		bool dst_ok = IS_SYSRAM_ADDRESS(out_buf) ||
			(IS_REMAPPED_ADDRESS(out_buf) && remap_adr0 == 3);

		if (!dst_ok) {
			return -EIO;
		}

		AES_HASH->CRYPTO_DEST_ADDR_REG = black_orca_phy_addr((uint32_t)out_buf);
	}

	AES_HASH->CRYPTO_LEN_REG = len;

	return 0;
}
442
/*
 * Copy 'len_words' byte-swapped words into the mode-dependent registers,
 * starting at CRYPTO_MREG3 and walking downwards (MREG3 holds the first word).
 */
static inline void crypto_smartbond_cipher_store_dep_data(uint32_t *words, uint32_t len_words)
{
	volatile uint32_t *mreg = &AES_HASH->CRYPTO_MREG3_REG;
	uint32_t idx = 0;

	while (idx < len_words) {
		*mreg = crypto_smartbond_swap_word((uint8_t *)&words[idx]);
		mreg--;
		idx++;
	}
}
451
/*
 * Load up to four words of IV/counter material into CRYPTO_MREG0..3. The
 * registers are zeroed first so unused words read back as zero.
 *
 * Returns 0 on success, -EINVAL when mreg is NULL or len_words is out of the
 * 1..4 range.
 */
static int crypto_smartbond_cipher_set_mreg(uint8_t *mreg, uint32_t len_words)
{
	if (mreg == NULL || len_words == 0 || len_words > 4) {
		return -EINVAL;
	}

	/* Reset all mode-dependent registers before loading new material. */
	AES_HASH->CRYPTO_MREG0_REG = 0;
	AES_HASH->CRYPTO_MREG1_REG = 0;
	AES_HASH->CRYPTO_MREG2_REG = 0;
	AES_HASH->CRYPTO_MREG3_REG = 0;

	crypto_smartbond_cipher_store_dep_data((uint32_t *)mreg, len_words);

	return 0;
}
467
/*
 * Power the AES/HASH engine up or down. Enabling gates the AMBA clock on,
 * clears any stale engine IRQ, unmasks the engine interrupt and enables it at
 * the NVIC; disabling reverses that sequence before gating the clock off.
 * The irq_lock guards the read-modify-write accesses to the shared
 * CRG_TOP/AES_HASH registers.
 */
static void crypto_smartbond_set_status(bool enable)
{
	unsigned int key;

	key = irq_lock();

	if (enable) {
		CRG_TOP->CLK_AMBA_REG |= (CRG_TOP_CLK_AMBA_REG_AES_CLK_ENABLE_Msk);

		/* Discard any pending event before unmasking the interrupt. */
		AES_HASH->CRYPTO_CLRIRQ_REG = 0x1;
		AES_HASH->CRYPTO_CTRL_REG |= (AES_HASH_CRYPTO_CTRL_REG_CRYPTO_IRQ_EN_Msk);

		irq_enable(SMARTBOND_IRQN);
	} else {
		AES_HASH->CRYPTO_CTRL_REG &= ~(AES_HASH_CRYPTO_CTRL_REG_CRYPTO_IRQ_EN_Msk);
		AES_HASH->CRYPTO_CLRIRQ_REG = 0x1;

		irq_disable(SMARTBOND_IRQN);

		/* Gate the engine clock off last. */
		CRG_TOP->CLK_AMBA_REG &= ~(CRG_TOP_CLK_AMBA_REG_AES_CLK_ENABLE_Msk);
	}

	irq_unlock(key);
}
492
crypto_smartbond_query_hw_caps(const struct device * dev)493 static int crypto_smartbond_query_hw_caps(const struct device *dev)
494 {
495 return CRYPTO_HW_CAPS;
496 }
497
/*
 * AES-ECB block operation handler (direction was latched into CRYPTO_ENCDEC
 * at session setup). In synchronous builds the caller blocks on sync_sem until
 * the engine ISR fires; in async builds the user callback reports completion.
 */
static int crypto_smartbond_cipher_ecb_handler(struct cipher_ctx *ctx, struct cipher_pkt *pkt)
{
	int ret;
	struct crypto_smartbond_data *data = ctx->device->data;

	/* The single engine instance cannot service two requests concurrently. */
	if ((AES_HASH->CRYPTO_STATUS_REG & AES_HASH_CRYPTO_STATUS_REG_CRYPTO_INACTIVE_Msk) == 0) {
		LOG_ERR("Crypto engine is already employed");
		return -EINVAL;
	}

	if (pkt->out_buf_max < pkt->in_len) {
		LOG_ERR("OUT buffer cannot be less that IN buffer");
		return -EINVAL;
	}

	if (pkt->in_buf == NULL || pkt->out_buf == NULL) {
		LOG_ERR("Missing IN or OUT buffer declaration");
		return -EIO;
	}

	/* ECB reveals patterns across blocks; only a single block is allowed. */
	if (pkt->in_len > 16) {
		LOG_ERR("For security reasons, do not operate on more than 16 bytes");
		return -EINVAL;
	}

	k_sem_take(&data->device_sem, K_FOREVER);

	ret = crypto_smartbond_check_in_restrictions(pkt->in_len);
	if (ret < 0) {
		LOG_ERR("Unsupported IN buffer size");
		k_sem_give(&data->device_sem);
		return ret;
	}

	ret = crypto_smartbond_set_in_out_buf(pkt->in_buf, pkt->out_buf, pkt->in_len);
	if (ret < 0) {
		LOG_ERR("Unsupported IN or OUT buffer location");
		k_sem_give(&data->device_sem);
		return ret;
	}

#if defined(CONFIG_CRYPTO_ASYNC)
	/* Stash the packet so the ISR can hand it to the user callback. */
	data->cipher_pkt = pkt;
#endif

	/* Start crypto processing */
	AES_HASH->CRYPTO_START_REG = 1;

#if !defined(CONFIG_CRYPTO_ASYNC)
	/* Wait for crypto to finish its task */
	k_sem_take(&data->sync_sem, K_FOREVER);
#endif

	/*
	 * Report the number of bytes operated upon. NOTE(review): in async builds
	 * this executes before the engine completes — it reflects the request size.
	 */
	pkt->out_len = pkt->in_len;

	k_sem_give(&data->device_sem);

	return 0;
}
558
/*
 * AES-CBC operation handler. Unless CAP_NO_IV_PREFIX is set, the 16-byte IV is
 * prepended to the ciphertext on encryption and skipped in the input on
 * decryption. The IV is loaded into CRYPTO_MREG0..3 before starting the engine.
 */
static int
crypto_smartbond_cipher_cbc_handler(struct cipher_ctx *ctx, struct cipher_pkt *pkt, uint8_t *iv)
{
	int ret;
	/* Bytes reserved for the IV prefix (0 or 16, depending on session flags). */
	int offset = 0;
	struct crypto_smartbond_data *data = ctx->device->data;
	/* Direction was latched into CRYPTO_ENCDEC at session setup. */
	bool is_op_encryption =
		!!(AES_HASH->CRYPTO_CTRL_REG & AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ENCDEC_Msk);

	if ((AES_HASH->CRYPTO_STATUS_REG & AES_HASH_CRYPTO_STATUS_REG_CRYPTO_INACTIVE_Msk) == 0) {
		LOG_ERR("Crypto engine is already employed");
		return -EINVAL;
	}

	/*
	 * NOTE(review): this sizing check assumes the 16-byte IV prefix is in
	 * play regardless of CAP_NO_IV_PREFIX — confirm against the session
	 * flags before relying on it for exact-size OUT buffers.
	 */
	if ((is_op_encryption && pkt->out_buf_max < (pkt->in_len + 16)) ||
		pkt->out_buf_max < (pkt->in_len - 16)) {
		LOG_ERR("Invalid OUT buffer size");
		return -EINVAL;
	}

	if (pkt->in_buf == NULL || pkt->out_buf == NULL) {
		LOG_ERR("Missing IN or OUT buffer declaration");
		return -EIO;
	}

	if ((ctx->flags & CAP_NO_IV_PREFIX) == 0) {
		offset = 16;
		if (is_op_encryption) {
			/* Prefix IV to ciphertext unless CAP_NO_IV_PREFIX is set. */
			memcpy(pkt->out_buf, iv, offset);
		}
	}

	k_sem_take(&data->device_sem, K_FOREVER);

	ret = crypto_smartbond_check_in_restrictions(pkt->in_len);
	if (ret < 0) {
		LOG_ERR("Unsupported IN buffer size");
		k_sem_give(&data->device_sem);
		return ret;
	}

	/* Load the 16-byte IV into the mode-dependent registers. */
	ret = crypto_smartbond_cipher_set_mreg(iv, 4);
	if (ret < 0) {
		LOG_ERR("Missing Initialization Vector (IV)");
		k_sem_give(&data->device_sem);
		return ret;
	}

	if (is_op_encryption) {
		/* Cipher-text is written after the (optional) IV prefix. */
		ret = crypto_smartbond_set_in_out_buf(pkt->in_buf,
				pkt->out_buf + offset, pkt->in_len);
	} else {
		/* Skip the (optional) IV prefix present in the input. */
		ret = crypto_smartbond_set_in_out_buf(pkt->in_buf + offset,
				pkt->out_buf, pkt->in_len - offset);
	}

	if (ret < 0) {
		LOG_ERR("Unsupported IN or OUT buffer location");
		k_sem_give(&data->device_sem);
		return ret;
	}

#if defined(CONFIG_CRYPTO_ASYNC)
	/* Stash the packet so the ISR can hand it to the user callback. */
	data->cipher_pkt = pkt;
#endif

	/* Start crypto processing */
	AES_HASH->CRYPTO_START_REG = 1;

#if !defined(CONFIG_CRYPTO_ASYNC)
	/* Wait for crypto to finish its task */
	k_sem_take(&data->sync_sem, K_FOREVER);
#endif

	/* Report the number of bytes operated upon (IV prefix accounted for). */
	if (is_op_encryption) {
		pkt->out_len = pkt->in_len + offset;
	} else {
		pkt->out_len = pkt->in_len - offset;
	}

	k_sem_give(&data->device_sem);

	return 0;
}
645
/*
 * AES-CTR operation handler. The initial counter block (IV followed by the
 * counter) is loaded into CRYPTO_MREG0..3 before starting the engine.
 */
static int crypto_smartbond_cipher_ctr_handler(struct cipher_ctx *ctx,
				struct cipher_pkt *pkt, uint8_t *ic)
{
	int ret;
	/*
	 * ivlen + ctrlen = keylen; ctr_len is expressed in bits.
	 * NOTE(review): this derives the counter-block split from keylen, which
	 * holds for AES-128 (16-byte block); confirm for 24/32-byte keys, as
	 * iv_len/4 > 4 would be rejected by set_mreg() below.
	 */
	uint32_t iv_len = ctx->keylen - (ctx->mode_params.ctr_info.ctr_len >> 3);
	struct crypto_smartbond_data *data = ctx->device->data;

	if ((AES_HASH->CRYPTO_STATUS_REG & AES_HASH_CRYPTO_STATUS_REG_CRYPTO_INACTIVE_Msk) == 0) {
		LOG_ERR("Crypto engine is already employed");
		return -EINVAL;
	}

	if (pkt->out_buf_max < pkt->in_len) {
		LOG_ERR("OUT buffer cannot be less that IN buffer");
		return -EINVAL;
	}

	if (pkt->in_buf == NULL || pkt->out_buf == NULL) {
		LOG_ERR("Missing IN or OUT buffer declaration");
		return -EIO;
	}

	k_sem_take(&data->device_sem, K_FOREVER);

	ret = crypto_smartbond_check_in_restrictions(pkt->in_len);
	if (ret < 0) {
		LOG_ERR("Unsupported IN buffer size");
		k_sem_give(&data->device_sem);
		return ret;
	}

	/* Load the initial counter block (iv_len bytes -> iv_len/4 words). */
	ret = crypto_smartbond_cipher_set_mreg(ic, iv_len >> 2);
	if (ret < 0) {
		LOG_ERR("Missing Initialization Counter (IC)");
		k_sem_give(&data->device_sem);
		return ret;
	}

	ret = crypto_smartbond_set_in_out_buf(pkt->in_buf, pkt->out_buf, pkt->in_len);
	if (ret < 0) {
		LOG_ERR("Unsupported IN or OUT buffer location");
		k_sem_give(&data->device_sem);
		return ret;
	}

#if defined(CONFIG_CRYPTO_ASYNC)
	/* Stash the packet so the ISR can hand it to the user callback. */
	data->cipher_pkt = pkt;
#endif

	/* Start crypto processing */
	AES_HASH->CRYPTO_START_REG = 1;

#if !defined(CONFIG_CRYPTO_ASYNC)
	/* Wait for crypto to finish its task */
	k_sem_take(&data->sync_sem, K_FOREVER);
#endif

	/* Report the number of bytes operated upon. */
	pkt->out_len = pkt->in_len;

	k_sem_give(&data->device_sem);

	return 0;
}
711
/*
 * HASH operation handler supporting fragmented input: 'finish' marks the last
 * fragment. While intermediate fragments are fed, the engine stays busy
 * waiting for more input and the destination address keeps advancing
 * automatically.
 */
static int crypto_smartbond_hash_handler(struct hash_ctx *ctx, struct hash_pkt *pkt, bool finish)
{
	int ret;
	struct crypto_smartbond_data *data = ctx->device->data;
	/*
	 * In case of fragmented data processing the crypto status is visible as
	 * busy (waiting for input) until the last block has been processed.
	 */
	bool is_multipart_started =
		(AES_HASH->CRYPTO_STATUS_REG & AES_HASH_CRYPTO_STATUS_REG_CRYPTO_WAIT_FOR_IN_Msk) &&
		!(AES_HASH->CRYPTO_STATUS_REG & AES_HASH_CRYPTO_STATUS_REG_CRYPTO_INACTIVE_Msk);

	if (pkt->in_buf == NULL || (pkt->out_buf == NULL)) {
		LOG_ERR("Missing IN or OUT buffer declaration");
		return -EIO;
	}

	k_sem_take(&data->device_sem, K_FOREVER);

	/* Check if this is the last block to process or more blocks will follow */
	if (finish) {
		AES_HASH->CRYPTO_CTRL_REG &= ~(AES_HASH_CRYPTO_CTRL_REG_CRYPTO_MORE_IN_Msk);
	} else {
		AES_HASH->CRYPTO_CTRL_REG |= AES_HASH_CRYPTO_CTRL_REG_CRYPTO_MORE_IN_Msk;
	}

	/* CRYPTO_MORE_IN should be updated prior to checking for IN restrictions! */
	ret = crypto_smartbond_check_in_restrictions(pkt->in_len);
	if (ret < 0) {
		LOG_ERR("Unsupported IN buffer size");
		k_sem_give(&data->device_sem);
		return ret;
	}

	/* Program the digest length only when starting a fresh (multi-part) hash. */
	if (!is_multipart_started) {
		ret = crypto_smartbond_hash_set_out_len();
		if (ret < 0) {
			LOG_ERR("Invalid OUT buffer size");
			k_sem_give(&data->device_sem);
			return ret;
		}
	}

	if (!is_multipart_started) {
		ret = crypto_smartbond_set_in_out_buf(pkt->in_buf, pkt->out_buf, pkt->in_len);
	} else {
		/* Destination buffer is being updated as fragmented input is being processed. */
		ret = crypto_smartbond_set_in_out_buf(pkt->in_buf, NULL, pkt->in_len);
	}

	if (ret < 0) {
		LOG_ERR("Unsupported IN or OUT buffer location");
		k_sem_give(&data->device_sem);
		return ret;
	}

#if defined(CONFIG_CRYPTO_ASYNC)
	/* Stash the packet so the ISR can hand it to the user callback. */
	data->hash_pkt = pkt;
#endif

	/* Start hash processing */
	AES_HASH->CRYPTO_START_REG = 1;

#if !defined(CONFIG_CRYPTO_ASYNC)
	/* Wait for the engine ISR; fires per fragment, final digest when 'finish'. */
	k_sem_take(&data->sync_sem, K_FOREVER);
#endif

	k_sem_give(&data->device_sem);

	return 0;
}
783
784 static int
crypto_smartbond_cipher_begin_session(const struct device * dev,struct cipher_ctx * ctx,enum cipher_algo algo,enum cipher_mode mode,enum cipher_op op_type)785 crypto_smartbond_cipher_begin_session(const struct device *dev, struct cipher_ctx *ctx,
786 enum cipher_algo algo, enum cipher_mode mode, enum cipher_op op_type)
787 {
788 int ret;
789
790 if (ctx->flags & ~(CRYPTO_HW_CAPS)) {
791 LOG_ERR("Unsupported flag");
792 return -EINVAL;
793 }
794
795 if (algo != CRYPTO_CIPHER_ALGO_AES) {
796 LOG_ERR("Unsupported cipher algo");
797 return -EINVAL;
798 }
799
800 if (!crypto_smartbond_lock_session(dev)) {
801 LOG_ERR("No free session for now");
802 return -ENOSPC;
803 }
804
805 /* First check if the requested cryptographic algo is supported */
806 ret = crypto_smartbond_cipher_set_mode(mode);
807 if (ret < 0) {
808 LOG_ERR("Unsupported cipher mode");
809 crypto_smartbond_unlock_session(dev);
810 return ret;
811 }
812
813 ret = crypto_smartbond_cipher_key_load((uint8_t *)ctx->key.bit_stream, ctx->keylen);
814 if (ret < 0) {
815 LOG_ERR("Invalid key length or key cannot be accessed");
816 crypto_smartbond_unlock_session(dev);
817 return ret;
818 }
819
820 if (op_type == CRYPTO_CIPHER_OP_ENCRYPT) {
821 AES_HASH->CRYPTO_CTRL_REG |= AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ENCDEC_Msk;
822 } else {
823 AES_HASH->CRYPTO_CTRL_REG &= ~(AES_HASH_CRYPTO_CTRL_REG_CRYPTO_ENCDEC_Msk);
824 }
825
826 /* IN buffer fragmentation is not supported by the driver model */
827 AES_HASH->CRYPTO_CTRL_REG &= ~(AES_HASH_CRYPTO_CTRL_REG_CRYPTO_MORE_IN_Msk);
828
829 switch (mode) {
830 case CRYPTO_CIPHER_MODE_ECB:
831 ctx->ops.block_crypt_hndlr = crypto_smartbond_cipher_ecb_handler;
832 break;
833 case CRYPTO_CIPHER_MODE_CBC:
834 ctx->ops.cbc_crypt_hndlr = crypto_smartbond_cipher_cbc_handler;
835 break;
836 case CRYPTO_CIPHER_MODE_CTR:
837 ctx->ops.ctr_crypt_hndlr = crypto_smartbond_cipher_ctr_handler;
838 break;
839 default:
840 break;
841 }
842
843 ctx->drv_sessn_state = NULL;
844
845 return 0;
846 }
847
static int crypto_smartbond_cipher_free_session(const struct device *dev, struct cipher_ctx *ctx)
{
	ARG_UNUSED(ctx);

	/* Power the engine down and make the single session slot available again. */
	crypto_smartbond_unlock_session(dev);

	return 0;
}
855
856 #if defined(CONFIG_CRYPTO_ASYNC)
857 static int
crypto_smartbond_cipher_set_async_callback(const struct device * dev,cipher_completion_cb cb)858 crypto_smartbond_cipher_set_async_callback(const struct device *dev, cipher_completion_cb cb)
859 {
860 struct crypto_smartbond_data *data = dev->data;
861
862 data->cipher_user_cb = cb;
863
864 return 0;
865 }
866 #endif
867
868 static int
crypto_smartbond_hash_begin_session(const struct device * dev,struct hash_ctx * ctx,enum hash_algo algo)869 crypto_smartbond_hash_begin_session(const struct device *dev,
870 struct hash_ctx *ctx, enum hash_algo algo)
871 {
872 int ret;
873
874 if (ctx->flags & ~(CRYPTO_HW_CAPS)) {
875 LOG_ERR("Unsupported flag");
876 return -EINVAL;
877 }
878
879 if (!crypto_smartbond_lock_session(dev)) {
880 LOG_ERR("No free session for now");
881 return -ENOSPC;
882 }
883
884 /*
885 * Crypto should be disabled only if not used in other sessions. In case of failure,
886 * developer should next free the current session.
887 */
888 crypto_smartbond_set_status(true);
889
890 ret = crypto_smartbond_hash_set_algo(algo);
891 if (ret < 0) {
892 LOG_ERR("Unsupported HASH algo");
893 crypto_smartbond_unlock_session(dev);
894 return ret;
895 }
896
897 ctx->hash_hndlr = crypto_smartbond_hash_handler;
898
899 ctx->drv_sessn_state = NULL;
900
901 return 0;
902 }
903
static int crypto_smartbond_hash_free_session(const struct device *dev, struct hash_ctx *ctx)
{
	ARG_UNUSED(ctx);

	/* Power the engine down and make the single session slot available again. */
	crypto_smartbond_unlock_session(dev);

	return 0;
}
911
912 #if defined(CONFIG_CRYPTO_ASYNC)
913 static int
crypto_smartbond_hash_set_async_callback(const struct device * dev,hash_completion_cb cb)914 crypto_smartbond_hash_set_async_callback(const struct device *dev, hash_completion_cb cb)
915 {
916 struct crypto_smartbond_data *data = dev->data;
917
918 data->hash_user_cb = cb;
919
920 return 0;
921 }
922 #endif
923
/* Crypto driver API vtable; async callback hooks exist only with CONFIG_CRYPTO_ASYNC. */
static DEVICE_API(crypto, crypto_smartbond_driver_api) = {
	.cipher_begin_session = crypto_smartbond_cipher_begin_session,
	.cipher_free_session = crypto_smartbond_cipher_free_session,
#if defined(CONFIG_CRYPTO_ASYNC)
	.cipher_async_callback_set = crypto_smartbond_cipher_set_async_callback,
#endif
	.hash_begin_session = crypto_smartbond_hash_begin_session,
	.hash_free_session = crypto_smartbond_hash_free_session,
#if defined(CONFIG_CRYPTO_ASYNC)
	.hash_async_callback_set = crypto_smartbond_hash_set_async_callback,
#endif
	.query_hw_caps = crypto_smartbond_query_hw_caps
};
937
938 #if defined(CONFIG_PM_DEVICE)
crypto_smartbond_pm_action(const struct device * dev,enum pm_device_action action)939 static int crypto_smartbond_pm_action(const struct device *dev,
940 enum pm_device_action action)
941 {
942 int ret = 0;
943
944 switch (action) {
945 case PM_DEVICE_ACTION_SUSPEND:
946 /*
947 * No need to perform any actions here as the AES/HASH controller
948 * should already be turned off.
949 */
950 break;
951 case PM_DEVICE_ACTION_RESUME:
952 /*
953 * No need to perform any actions here as the AES/HASH controller
954 * will be initialized upon acquiring a cryptographic session.
955 */
956 break;
957 default:
958 return -ENOTSUP;
959 }
960
961 return ret;
962 }
963 #endif
964
crypto_smartbond_init(const struct device * dev)965 static int crypto_smartbond_init(const struct device *dev)
966 {
967 struct crypto_smartbond_data *data = dev->data;
968
969 /* Semaphore used during sessions (begin/free) */
970 k_sem_init(&data->session_sem, 1, 1);
971
972 /* Semaphore used to employ the crypto device */
973 k_sem_init(&data->device_sem, 1, 1);
974
975 #if !defined(CONFIG_CRYPTO_ASYNC)
976 /* Sempahore used when sync operations are enabled */
977 k_sem_init(&data->sync_sem, 0, 1);
978 #endif
979
980 IRQ_CONNECT(SMARTBOND_IRQN, SMARTBOND_IRQ_PRIO, smartbond_crypto_isr,
981 DEVICE_DT_INST_GET(0), 0);
982
983 /* Controller should be initialized once a crypyographic session is requested */
984 crypto_smartbond_set_status(false);
985
986 return 0;
987 }
988
989 /*
990 * There is only one instance integrated on the SoC. Just in case that assumption becomes invalid
991 * in the future, we use a BUILD_ASSERT().
992 */
993 #define SMARTBOND_CRYPTO_INIT(inst) \
994 BUILD_ASSERT((inst) == 0, \
995 "multiple instances are not supported"); \
996 \
997 PM_DEVICE_DT_INST_DEFINE(inst, crypto_smartbond_pm_action); \
998 \
999 static struct crypto_smartbond_data crypto_smartbond_data_##inst; \
1000 \
1001 DEVICE_DT_INST_DEFINE(0, \
1002 crypto_smartbond_init, \
1003 PM_DEVICE_DT_INST_GET(inst), \
1004 &crypto_smartbond_data_##inst, NULL, \
1005 POST_KERNEL, \
1006 CONFIG_CRYPTO_INIT_PRIORITY, \
1007 &crypto_smartbond_driver_api);
1008
1009 DT_INST_FOREACH_STATUS_OKAY(SMARTBOND_CRYPTO_INIT)
1010