1 /*
2 * Copyright (c) 2024 BayLibre, SAS
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #define DT_DRV_COMPAT ti_cc23x0_aes
8
9 #include <zephyr/logging/log.h>
10 LOG_MODULE_REGISTER(crypto_cc23x0, CONFIG_CRYPTO_LOG_LEVEL);
11
12 #include <zephyr/crypto/crypto.h>
13 #include <zephyr/device.h>
14 #include <zephyr/irq.h>
15 #include <zephyr/kernel.h>
16 #include <zephyr/sys/util.h>
17
18 #include <string.h>
19
20 #include <driverlib/aes.h>
21 #include <driverlib/clkctl.h>
22
/* Hardware capabilities advertised via query_hw_caps() */
#define CRYPTO_CC23_CAP (CAP_RAW_KEY | CAP_SEPARATE_IO_BUFS | \
			 CAP_SYNC_OPS | CAP_NO_IV_PREFIX)

/* Only the "AES operation done" interrupt is used by this driver */
#define CRYPTO_CC23_INT_MASK	AES_IMASK_AESDONE

/* CCM mode: see https://datatracker.ietf.org/doc/html/rfc3610 for reference */
/* L = number of octets encoding the message length (RFC 3610: 2..8) */
#define CCM_CC23_MSG_LEN_SIZE_MIN	2
#define CCM_CC23_MSG_LEN_SIZE_MAX	8

/* Nonce length N = 15 - L, so N ranges over [15 - Lmax, 15 - Lmin] */
#define CCM_CC23_NONCE_LEN_SIZE_MIN	(AES_BLOCK_SIZE - CCM_CC23_MSG_LEN_SIZE_MAX - 1)
#define CCM_CC23_NONCE_LEN_SIZE_MAX	(AES_BLOCK_SIZE - CCM_CC23_MSG_LEN_SIZE_MIN - 1)

/*
 * Additional data is packed into a single B1 block: 2 octets encode l(a),
 * leaving AES_BLOCK_SIZE - 2 octets for the data itself.
 */
#define CCM_CC23_AD_LEN_SIZE		2
#define CCM_CC23_AD_DATA_SIZE_MAX	(AES_BLOCK_SIZE - CCM_CC23_AD_LEN_SIZE)

/* Authentication tag M must be an even value in [4, 16] (RFC 3610) */
#define CCM_CC23_TAG_SIZE_MIN		4
#define CCM_CC23_TAG_SIZE_MAX		16

/* Extract byte <idx> (little-endian index) of integer <val> */
#define CCM_CC23_BYTE_GET(idx, val)	FIELD_GET(0xff << ((idx) << 3), (val))

/*
 * Build the flags octet of block B0 (RFC 3610):
 * bit 6 = Adata, bits 5..3 = (M - 2) / 2, bits 2..0 = L - 1.
 */
#define CCM_CC23_B0_GET(ad_len, tag_len, len_size) \
	(((ad_len) ? 1 << 6 : 0) + (((tag_len) - 2) << 2) + ((len_size) - 1))

/*
 * The Finite State Machine (FSM) processes the data in a column-fashioned way,
 * processing 2 columns/cycle, completing 10 rounds in 20 cycles. With three cycles
 * of pre-processing, the execution/encryption time is 23 cycles.
 */
#define CRYPTO_CC23_OP_TIMEOUT	K_CYC(23 << 1)
52
/* Per-device runtime state */
struct crypto_cc23x0_data {
	struct k_mutex device_mutex;	/* Serializes access to the single AES engine */
	struct k_sem aes_done;		/* Given by the ISR on each AESDONE interrupt */
};
57
crypto_cc23x0_isr(const struct device * dev)58 static void crypto_cc23x0_isr(const struct device *dev)
59 {
60 struct crypto_cc23x0_data *data = dev->data;
61 uint32_t status;
62
63 status = AESGetMaskedInterruptStatus();
64
65 if (status & AES_IMASK_AESDONE) {
66 k_sem_give(&data->aes_done);
67 }
68
69 AESClearInterrupt(status);
70 }
71
/*
 * Reset the AES engine state between operations: clear the auto-trigger
 * and bus-halt configuration, then scrub the TXT/BUF registers so no
 * key- or data-dependent material is left behind.
 */
static void crypto_cc23x0_cleanup(void)
{
	AESClearAUTOCFGTrigger();
	AESClearAUTOCFGBusHalt();
	AESClearTXTAndBUF();
}
78
/*
 * AES-ECB encryption.
 *
 * Pipelines the hardware: while block N is being encrypted, block N+1 is
 * preloaded into BUF; reading TXT both returns the result and triggers the
 * next encryption (per the AUTOCFG triggers set below). Statement order is
 * therefore significant throughout.
 *
 * NOTE(review): assumes pkt->in_len is a nonzero multiple of AES_BLOCK_SIZE;
 * a partial final block would read past in_buf — confirm callers guarantee
 * block-aligned input.
 *
 * Returns 0 on success, or the k_sem_take() error on timeout.
 */
static int crypto_cc23x0_ecb_encrypt(struct cipher_ctx *ctx, struct cipher_pkt *pkt)
{
	const struct device *dev = ctx->device;
	struct crypto_cc23x0_data *data = dev->data;
	int in_bytes_processed = 0;
	int out_bytes_processed = 0;
	int ret;

	if (pkt->out_buf_max < ROUND_UP(pkt->in_len, AES_BLOCK_SIZE)) {
		LOG_ERR("Output buffer too small");
		return -EINVAL;
	}

	k_mutex_lock(&data->device_mutex, K_FOREVER);

	/* Load key */
	AESWriteKEY(ctx->key.bit_stream);

	/* Configure source buffer and encryption triggers */
	AESSetAUTOCFG(AES_AUTOCFG_AESSRC_BUF |
		      AES_AUTOCFG_TRGAES_RDTXT3 |
		      AES_AUTOCFG_TRGAES_WRBUF3S);

	/* Write first block of input to trigger encryption */
	AESWriteBUF(pkt->in_buf);
	in_bytes_processed += AES_BLOCK_SIZE;

	do {
		if (in_bytes_processed < pkt->in_len) {
			/* Preload next input block */
			AESWriteBUF(&pkt->in_buf[in_bytes_processed]);
			in_bytes_processed += AES_BLOCK_SIZE;
		} else {
			/* Avoid triggering a spurious encryption upon reading the final output */
			AESClearAUTOCFGTrigger();
		}

		/* Wait for AES operation completion */
		ret = k_sem_take(&data->aes_done, CRYPTO_CC23_OP_TIMEOUT);
		if (ret) {
			goto cleanup;
		}

		LOG_DBG("AES operation completed");

		/*
		 * Read output and trigger encryption of next input that was
		 * preloaded at the start of this loop.
		 */
		AESReadTXT(&pkt->out_buf[out_bytes_processed]);
		out_bytes_processed += AES_BLOCK_SIZE;
	} while (out_bytes_processed < pkt->in_len);

cleanup:
	crypto_cc23x0_cleanup();
	k_mutex_unlock(&data->device_mutex);
	pkt->out_len = out_bytes_processed;

	return ret;
}
139
/*
 * AES-CTR encryption/decryption (symmetric: the same keystream XOR is
 * applied in both directions).
 *
 * The engine encrypts the counter block; AESWriteTXTXOR() XORs the input
 * data against that keystream and, via the AUTOCFG triggers, reading TXT
 * kicks off encryption of the (auto-incremented) next counter block.
 * Statement order is therefore significant.
 *
 * @iv: initial counter block. In CCM mode the full 16-byte CTRi block is
 *      used; otherwise only (keylen - ctr_len) bytes are copied.
 *      NOTE(review): iv_len is derived from ctx->keylen, which only equals
 *      AES_BLOCK_SIZE for 128-bit keys (the sole size accepted at session
 *      setup) — revisit if larger keys are ever supported.
 *
 * Returns 0 on success, or the k_sem_take() error on timeout.
 */
static int crypto_cc23x0_ctr(struct cipher_ctx *ctx, struct cipher_pkt *pkt, uint8_t *iv)
{
	const struct device *dev = ctx->device;
	struct crypto_cc23x0_data *data = dev->data;
	uint32_t ctr_len = ctx->mode_params.ctr_info.ctr_len >> 3;
	uint8_t ctr[AES_BLOCK_SIZE] = { 0 };
	uint8_t last_buf[AES_BLOCK_SIZE] = { 0 };
	int bytes_remaining = pkt->in_len;
	int bytes_processed = 0;
	int block_size;
	int iv_len;
	int ret;

	if (pkt->out_buf_max < ROUND_UP(pkt->in_len, AES_BLOCK_SIZE)) {
		LOG_ERR("Output buffer too small");
		return -EINVAL;
	}

	k_mutex_lock(&data->device_mutex, K_FOREVER);

	/* Load key */
	AESWriteKEY(ctx->key.bit_stream);

	/* Configure source buffer and encryption triggers */
	AESSetAUTOCFG(AES_AUTOCFG_AESSRC_BUF |
		      AES_AUTOCFG_TRGAES_RDTXT3 |
		      AES_AUTOCFG_TRGAES_WRBUF3S |
		      AES_AUTOCFG_CTRENDN_BIGENDIAN |
		      AES_AUTOCFG_CTRSIZE_CTR128);

	/* Write the counter value to the AES engine to trigger first encryption */
	iv_len = (ctx->ops.cipher_mode == CRYPTO_CIPHER_MODE_CCM) ?
		AES_BLOCK_SIZE : (ctx->keylen - ctr_len);
	memcpy(ctr, iv, iv_len);
	AESWriteBUF(ctr);

	do {
		/* Wait for AES operation completion */
		ret = k_sem_take(&data->aes_done, CRYPTO_CC23_OP_TIMEOUT);
		if (ret) {
			goto cleanup;
		}

		LOG_DBG("AES operation completed");

		/* XOR input data with encrypted counter block to form ciphertext */
		if (bytes_remaining > AES_BLOCK_SIZE) {
			block_size = AES_BLOCK_SIZE;
			AESWriteTXTXOR(&pkt->in_buf[bytes_processed]);
		} else {
			/* Pad the final partial block with zeros before XOR */
			block_size = bytes_remaining;
			memcpy(last_buf, &pkt->in_buf[bytes_processed], block_size);
			AESWriteTXTXOR(last_buf);

			/*
			 * Do not auto-trigger encrypt and increment of counter
			 * value for last block of data.
			 */
			AESClearAUTOCFGTrigger();
		}

		/*
		 * Read the output ciphertext and trigger the encryption
		 * of the next counter block
		 */
		AESReadTXT(&pkt->out_buf[bytes_processed]);

		bytes_processed += block_size;
		bytes_remaining -= block_size;
	} while (bytes_remaining > 0);

cleanup:
	crypto_cc23x0_cleanup();
	k_mutex_unlock(&data->device_mutex);
	pkt->out_len = bytes_processed;

	return ret;
}
218
/*
 * Compute a CBC-MAC over (optional) blocks B0, B1 and the packet data,
 * writing the 16-byte tag to pkt->out_buf.
 *
 * The engine chains blocks itself: each AESWriteBUF() of a full block
 * triggers an encryption whose result feeds the next block (IV starts at
 * zero). Statement order is therefore significant.
 *
 * @b0: optional first block (CCM's B0), or NULL to skip.
 * @b1: optional second block (CCM's B1 additional-data block), or NULL.
 *
 * Returns 0 on success, or the k_sem_take() error on timeout.
 */
static int crypto_cc23x0_cmac(struct cipher_ctx *ctx, struct cipher_pkt *pkt,
			      uint8_t *b0, uint8_t *b1)
{
	const struct device *dev = ctx->device;
	struct crypto_cc23x0_data *data = dev->data;
	uint32_t iv[AES_BLOCK_SIZE_WORDS] = { 0 };
	uint8_t last_buf[AES_BLOCK_SIZE] = { 0 };
	int bytes_remaining = pkt->in_len;
	int bytes_processed = 0;
	int block_size;
	int ret;

	if (pkt->out_buf_max < AES_BLOCK_SIZE) {
		LOG_ERR("Output buffer too small");
		return -EINVAL;
	}

	k_mutex_lock(&data->device_mutex, K_FOREVER);

	/* Load key */
	AESWriteKEY(ctx->key.bit_stream);

	/* Configure source buffer and encryption triggers */
	AESSetAUTOCFG(AES_AUTOCFG_AESSRC_TXTXBUF |
		      AES_AUTOCFG_TRGAES_WRBUF3 |
		      AES_AUTOCFG_BUSHALT_EN);

	/* Write zero'd IV */
	AESWriteIV32(iv);

	if (b0) {
		/* Load input block */
		AESWriteBUF(b0);

		/* Wait for AES operation completion */
		ret = k_sem_take(&data->aes_done, CRYPTO_CC23_OP_TIMEOUT);
		if (ret) {
			goto out;
		}

		LOG_DBG("AES operation completed (block 0)");
	}

	if (b1) {
		/* Load input block */
		AESWriteBUF(b1);

		/* Wait for AES operation completion */
		ret = k_sem_take(&data->aes_done, CRYPTO_CC23_OP_TIMEOUT);
		if (ret) {
			goto out;
		}

		LOG_DBG("AES operation completed (block 1)");
	}

	do {
		/* Load input block (zero-padding the final partial block) */
		if (bytes_remaining >= AES_BLOCK_SIZE) {
			block_size = AES_BLOCK_SIZE;
			AESWriteBUF(&pkt->in_buf[bytes_processed]);
		} else {
			block_size = bytes_remaining;
			memcpy(last_buf, &pkt->in_buf[bytes_processed], block_size);
			AESWriteBUF(last_buf);
		}

		/* Wait for AES operation completion */
		ret = k_sem_take(&data->aes_done, CRYPTO_CC23_OP_TIMEOUT);
		if (ret) {
			goto out;
		}

		LOG_DBG("AES operation completed (data)");

		bytes_processed += block_size;
		bytes_remaining -= block_size;
	} while (bytes_remaining > 0);

	/* Read tag */
	AESReadTag(pkt->out_buf);

out:
	crypto_cc23x0_cleanup();
	k_mutex_unlock(&data->device_mutex);
	pkt->out_len = bytes_processed;

	return ret;
}
308
crypto_cc23x0_ccm_check_param(struct cipher_ctx * ctx,struct cipher_aead_pkt * aead_op)309 static int crypto_cc23x0_ccm_check_param(struct cipher_ctx *ctx, struct cipher_aead_pkt *aead_op)
310 {
311 uint16_t ad_len = aead_op->ad_len;
312 uint16_t tag_len = ctx->mode_params.ccm_info.tag_len;
313 uint16_t nonce_len = ctx->mode_params.ccm_info.nonce_len;
314
315 if (aead_op->pkt->out_buf_max < ROUND_UP(aead_op->pkt->in_len, AES_BLOCK_SIZE)) {
316 LOG_ERR("Output buffer too small");
317 return -EINVAL;
318 }
319
320 if (tag_len < CCM_CC23_TAG_SIZE_MIN || tag_len > CCM_CC23_TAG_SIZE_MAX || tag_len & 1) {
321 LOG_ERR("CCM parameter invalid (tag_len must be an even value from %d to %d)",
322 CCM_CC23_TAG_SIZE_MIN, CCM_CC23_TAG_SIZE_MAX);
323 return -EINVAL;
324 }
325
326 if (nonce_len < CCM_CC23_NONCE_LEN_SIZE_MIN || nonce_len > CCM_CC23_NONCE_LEN_SIZE_MAX) {
327 LOG_ERR("CCM parameter invalid (nonce_len must be a value from %d to %d)",
328 CCM_CC23_NONCE_LEN_SIZE_MIN, CCM_CC23_NONCE_LEN_SIZE_MAX);
329 return -EINVAL;
330 }
331
332 if (ad_len > CCM_CC23_AD_DATA_SIZE_MAX) {
333 LOG_ERR("CCM parameter invalid (ad_len max = %d)", CCM_CC23_AD_DATA_SIZE_MAX);
334 return -EINVAL;
335 }
336
337 return 0;
338 }
339
/*
 * AES-CCM authenticated encryption (RFC 3610): CBC-MAC over B0 | B1 | data
 * to produce the tag, CTR mode (starting at counter 1) to encrypt the
 * payload, then CTR with counter 0 to encrypt the tag.
 *
 * On success aead_op->pkt->out_buf holds the ciphertext and aead_op->tag
 * the encrypted authentication tag.
 *
 * Returns 0 on success, -EINVAL for bad parameters, or a k_sem_take()
 * error propagated from the underlying CMAC/CTR steps.
 */
static int crypto_cc23x0_ccm_encrypt(struct cipher_ctx *ctx,
				     struct cipher_aead_pkt *aead_op, uint8_t *nonce)
{
	struct cipher_pkt tag_pkt = { 0 };
	struct cipher_pkt data_pkt = { 0 };
	uint8_t tag[AES_BLOCK_SIZE] = { 0 };
	uint8_t b0[AES_BLOCK_SIZE] = { 0 };
	uint8_t b1[AES_BLOCK_SIZE] = { 0 };
	uint8_t ctri[AES_BLOCK_SIZE] = { 0 };
	uint32_t msg_len = aead_op->pkt->in_len;
	uint16_t ad_len = aead_op->ad_len;
	uint16_t tag_len = ctx->mode_params.ccm_info.tag_len;
	uint16_t nonce_len = ctx->mode_params.ccm_info.nonce_len;
	uint8_t len_size = AES_BLOCK_SIZE - nonce_len - 1;
	int ret;
	int i;

	ret = crypto_cc23x0_ccm_check_param(ctx, aead_op);
	if (ret) {
		return ret;
	}

	/*
	 * Build the first block B0 required for CMAC computation.
	 * ============================================
	 * Block B0 formatting per RFC3610:
	 *     Octet Number   Contents
	 *     ------------   ---------
	 *     0              Flags
	 *     1 ... 15-L     Nonce N
	 *     16-L ... 15    l(m), MSB first
	 *
	 * Flags in octet 0 of B0:
	 *     Bit Number   Contents
	 *     ----------   ----------------------
	 *     7            Reserved (always zero)
	 *     6            Adata = 1 if l(a) > 0, 0 otherwise
	 *     5 ... 3      M' = (M - 2) / 2 where M = Number of octets in authentication field
	 *     2 ... 0      L' = L - 1 where L = Number of octets in length field
	 * ============================================
	 */
	b0[0] = CCM_CC23_B0_GET(aead_op->ad_len, tag_len, len_size);

	/* l(m), MSB first, in the trailing octets of B0 */
	for (i = 0 ; i < sizeof(msg_len) ; i++) {
		b0[AES_BLOCK_SIZE - 1 - i] = CCM_CC23_BYTE_GET(i, msg_len);
	}

	memcpy(&b0[1], nonce, nonce_len);

	/*
	 * Build the second block B1 for additional data (header).
	 * ============================================
	 * Block B1 formatting per RFC3610, for 0 < l(a) < (2^16 - 2^8):
	 *     Octet Number   Contents
	 *     ------------   ---------
	 *     0 ... 1        l(a), MSB first
	 *     2 ... N        Header data
	 *     N+1 ... 15     Zero padding
	 * ============================================
	 */
	for (i = 0 ; i < sizeof(ad_len) ; i++) {
		b1[CCM_CC23_AD_LEN_SIZE - 1 - i] = CCM_CC23_BYTE_GET(i, ad_len);
	}

	memcpy(&b1[CCM_CC23_AD_LEN_SIZE], aead_op->ad, ad_len);

	/* Calculate the authentication tag by passing B0, B1, and data to CMAC function. */
	LOG_DBG("Compute CMAC");

	data_pkt.in_buf = aead_op->pkt->in_buf;
	data_pkt.in_len = aead_op->pkt->in_len;
	data_pkt.out_buf = tag;
	data_pkt.out_buf_max = AES_BLOCK_SIZE;

	ret = crypto_cc23x0_cmac(ctx, &data_pkt, b0, b1);
	if (ret) {
		return ret;
	}

	/*
	 * Prepare the initial counter block CTR1 for the CTR mode.
	 * ============================================
	 * Block CTRi formatting per RFC3610:
	 *     Octet Number   Contents
	 *     ------------   ---------
	 *     0              Flags
	 *     1 ... 15-L     Nonce N
	 *     16-L ... 15    Counter i, MSB first
	 *
	 * Flags in octet 0 of CTR0:
	 *     Bit Number   Contents
	 *     ----------   ----------------------
	 *     7            Reserved (always zero)
	 *     6            Reserved (always zero)
	 *     5 ... 3      Zero
	 *     2 ... 0      L' = L - 1 where L = Number of octets in length field
	 * ============================================
	 */
	ctri[0] = len_size - 1;
	memcpy(&ctri[1], nonce, nonce_len);
	ctri[AES_BLOCK_SIZE - 1] = 1;

	/* Encrypt the data using the counter block CTR1. */
	LOG_DBG("Encrypt data");

	ret = crypto_cc23x0_ctr(ctx, aead_op->pkt, ctri);
	if (ret) {
		return ret;
	}

	/* Encrypt the authentication tag using the counter block CTR0. */
	LOG_DBG("Encrypt tag");

	ctri[AES_BLOCK_SIZE - 1] = 0;

	tag_pkt.in_buf = tag;
	tag_pkt.in_len = tag_len;
	tag_pkt.out_buf = aead_op->tag;
	tag_pkt.out_buf_max = AES_BLOCK_SIZE;

	return crypto_cc23x0_ctr(ctx, &tag_pkt, ctri);
}
462
crypto_cc23x0_ccm_decrypt(struct cipher_ctx * ctx,struct cipher_aead_pkt * aead_op,uint8_t * nonce)463 static int crypto_cc23x0_ccm_decrypt(struct cipher_ctx *ctx,
464 struct cipher_aead_pkt *aead_op, uint8_t *nonce)
465 {
466 struct cipher_pkt tag_pkt = { 0 };
467 struct cipher_pkt data_pkt = { 0 };
468 uint8_t enc_tag[AES_BLOCK_SIZE] = { 0 };
469 uint8_t calc_tag[AES_BLOCK_SIZE] = { 0 };
470 uint8_t b0[AES_BLOCK_SIZE] = { 0 };
471 uint8_t b1[AES_BLOCK_SIZE] = { 0 };
472 uint8_t ctri[AES_BLOCK_SIZE] = { 0 };
473 uint32_t msg_len = aead_op->pkt->in_len;
474 uint16_t ad_len = aead_op->ad_len;
475 uint16_t tag_len = ctx->mode_params.ccm_info.tag_len;
476 uint16_t nonce_len = ctx->mode_params.ccm_info.nonce_len;
477 uint8_t len_size = AES_BLOCK_SIZE - nonce_len - 1;
478 int ret;
479 int i;
480
481 ret = crypto_cc23x0_ccm_check_param(ctx, aead_op);
482 if (ret) {
483 return ret;
484 }
485
486 /* Prepare the initial counter block CTR1 for the CTR mode. */
487 ctri[0] = len_size - 1;
488 memcpy(&ctri[1], nonce, nonce_len);
489 ctri[AES_BLOCK_SIZE - 1] = 1;
490
491 /* Decrypt the data using the counter block CTR1. */
492 LOG_DBG("Decrypt data");
493
494 ret = crypto_cc23x0_ctr(ctx, aead_op->pkt, ctri);
495 if (ret) {
496 goto clear_out_buf;
497 }
498
499 /* Build the first block B0 required for CMAC computation. */
500 b0[0] = CCM_CC23_B0_GET(aead_op->ad_len, tag_len, len_size);
501
502 for (i = 0 ; i < sizeof(msg_len) ; i++) {
503 b0[AES_BLOCK_SIZE - 1 - i] = CCM_CC23_BYTE_GET(i, msg_len);
504 }
505
506 memcpy(&b0[1], nonce, nonce_len);
507
508 /* Build the second block B1 for additional data (header). */
509 for (i = 0 ; i < sizeof(ad_len) ; i++) {
510 b1[CCM_CC23_AD_LEN_SIZE - 1 - i] = CCM_CC23_BYTE_GET(i, ad_len);
511 }
512
513 memcpy(&b1[CCM_CC23_AD_LEN_SIZE], aead_op->ad, ad_len);
514
515 /*
516 * Calculate the authentication tag by passing B0, B1, and decrypted data
517 * to CMAC function.
518 */
519 LOG_DBG("Compute CMAC");
520
521 data_pkt.in_buf = aead_op->pkt->out_buf;
522 data_pkt.in_len = aead_op->pkt->out_len;
523 data_pkt.out_buf = calc_tag;
524 data_pkt.out_buf_max = AES_BLOCK_SIZE;
525
526 ret = crypto_cc23x0_cmac(ctx, &data_pkt, b0, b1);
527 if (ret) {
528 goto clear_out_buf;
529 }
530
531 /* Encrypt the recalculated authentication tag using the counter block CTR0. */
532 LOG_DBG("Encrypt tag");
533
534 ctri[AES_BLOCK_SIZE - 1] = 0;
535
536 tag_pkt.in_buf = calc_tag;
537 tag_pkt.in_len = tag_len;
538 tag_pkt.out_buf = enc_tag;
539 tag_pkt.out_buf_max = AES_BLOCK_SIZE;
540
541 ret = crypto_cc23x0_ctr(ctx, &tag_pkt, ctri);
542 if (ret) {
543 goto clear_out_buf;
544 }
545
546 /*
547 * Compare the recalculated encrypted authentication tag with the one supplied by the app.
548 * If they match, the decrypted data is returned. Otherwise, the authentication has failed
549 * and the output buffer is zero'd.
550 */
551 LOG_DBG("Check tag");
552
553 if (!memcmp(enc_tag, aead_op->tag, tag_len)) {
554 return 0;
555 }
556
557 LOG_ERR("Invalid tag");
558 ret = -EINVAL;
559
560 clear_out_buf:
561 memset(aead_op->pkt->out_buf, 0, msg_len);
562
563 return ret;
564 }
565
crypto_cc23x0_session_setup(const struct device * dev,struct cipher_ctx * ctx,enum cipher_algo algo,enum cipher_mode mode,enum cipher_op op_type)566 static int crypto_cc23x0_session_setup(const struct device *dev,
567 struct cipher_ctx *ctx,
568 enum cipher_algo algo,
569 enum cipher_mode mode,
570 enum cipher_op op_type)
571 {
572 if (ctx->flags & ~(CRYPTO_CC23_CAP)) {
573 LOG_ERR("Unsupported feature");
574 return -EINVAL;
575 }
576
577 if (algo != CRYPTO_CIPHER_ALGO_AES) {
578 LOG_ERR("Unsupported algo");
579 return -EINVAL;
580 }
581
582 if (mode != CRYPTO_CIPHER_MODE_ECB &&
583 mode != CRYPTO_CIPHER_MODE_CTR &&
584 mode != CRYPTO_CIPHER_MODE_CCM) {
585 LOG_ERR("Unsupported mode");
586 return -EINVAL;
587 }
588
589 if (ctx->keylen != 16U) {
590 LOG_ERR("%u key size is not supported", ctx->keylen);
591 return -EINVAL;
592 }
593
594 if (!ctx->key.bit_stream) {
595 LOG_ERR("No key provided");
596 return -EINVAL;
597 }
598
599 if (op_type == CRYPTO_CIPHER_OP_ENCRYPT) {
600 switch (mode) {
601 case CRYPTO_CIPHER_MODE_ECB:
602 ctx->ops.block_crypt_hndlr = crypto_cc23x0_ecb_encrypt;
603 break;
604 case CRYPTO_CIPHER_MODE_CTR:
605 ctx->ops.ctr_crypt_hndlr = crypto_cc23x0_ctr;
606 break;
607 case CRYPTO_CIPHER_MODE_CCM:
608 ctx->ops.ccm_crypt_hndlr = crypto_cc23x0_ccm_encrypt;
609 break;
610 default:
611 return -EINVAL;
612 }
613 } else {
614 switch (mode) {
615 case CRYPTO_CIPHER_MODE_ECB:
616 LOG_ERR("ECB decryption not supported by the hardware");
617 return -EINVAL;
618 case CRYPTO_CIPHER_MODE_CTR:
619 ctx->ops.ctr_crypt_hndlr = crypto_cc23x0_ctr;
620 break;
621 case CRYPTO_CIPHER_MODE_CCM:
622 ctx->ops.ccm_crypt_hndlr = crypto_cc23x0_ccm_decrypt;
623 break;
624 default:
625 return -EINVAL;
626 }
627 }
628
629 ctx->ops.cipher_mode = mode;
630 ctx->device = dev;
631
632 return 0;
633 }
634
crypto_cc23x0_session_free(const struct device * dev,struct cipher_ctx * ctx)635 static int crypto_cc23x0_session_free(const struct device *dev,
636 struct cipher_ctx *ctx)
637 {
638 ARG_UNUSED(dev);
639
640 ctx->ops.ccm_crypt_hndlr = NULL;
641 ctx->device = NULL;
642
643 return 0;
644 }
645
crypto_cc23x0_query_caps(const struct device * dev)646 static int crypto_cc23x0_query_caps(const struct device *dev)
647 {
648 ARG_UNUSED(dev);
649
650 return CRYPTO_CC23_CAP;
651 }
652
crypto_cc23x0_init(const struct device * dev)653 static int crypto_cc23x0_init(const struct device *dev)
654 {
655 struct crypto_cc23x0_data *data = dev->data;
656
657 IRQ_CONNECT(DT_INST_IRQN(0),
658 DT_INST_IRQ(0, priority),
659 crypto_cc23x0_isr,
660 DEVICE_DT_INST_GET(0),
661 0);
662 irq_enable(DT_INST_IRQN(0));
663
664 CLKCTLEnable(CLKCTL_BASE, CLKCTL_LAES);
665
666 AESSetIMASK(CRYPTO_CC23_INT_MASK);
667
668 k_mutex_init(&data->device_mutex);
669 k_sem_init(&data->aes_done, 0, 1);
670
671 return 0;
672 }
673
/* Crypto subsystem API table; per-session operation handlers are
 * installed by crypto_cc23x0_session_setup().
 */
static DEVICE_API(crypto, crypto_enc_funcs) = {
	.cipher_begin_session = crypto_cc23x0_session_setup,
	.cipher_free_session = crypto_cc23x0_session_free,
	.query_hw_caps = crypto_cc23x0_query_caps,
};

/* Runtime state for devicetree instance 0 (the only instance defined) */
static struct crypto_cc23x0_data crypto_cc23x0_dev_data;

DEVICE_DT_INST_DEFINE(0,
		      crypto_cc23x0_init,
		      NULL,
		      &crypto_cc23x0_dev_data,
		      NULL,
		      POST_KERNEL,
		      CONFIG_CRYPTO_INIT_PRIORITY,
		      &crypto_enc_funcs);
690