 * SPDX-License-Identifier: Apache-2.0
 * - As there is only one AES controller, this implementation is not using a device configuration.
 * - If not noted otherwise, chapter numbers refer to the SiM3U1XX/SiM3C1XX reference manual
 *   (SiM3U1xx-SiM3C1xx-RM.pdf, revision 1.0)
 * - Each DMA channel has one word of unused data (=> 3 x 4 = 12 bytes of unused RAM)
#include <zephyr/drivers/dma.h>
#define DMA_CHANNEL_ID_RX DT_INST_DMAS_CELL_BY_NAME(0, rx, channel)
#define DMA_CHANNEL_ID_TX DT_INST_DMAS_CELL_BY_NAME(0, tx, channel)
#define DMA_CHANNEL_ID_XOR DT_INST_DMAS_CELL_BY_NAME(0, xor, channel)

BUILD_ASSERT(DMA_CHANNEL_ID_RX < DMA_CHANNEL_COUNT, "Too few DMA channels");
BUILD_ASSERT(DMA_CHANNEL_ID_TX < DMA_CHANNEL_COUNT, "Too few DMA channels");
BUILD_ASSERT(DMA_CHANNEL_ID_XOR < DMA_CHANNEL_COUNT, "Too few DMA channels");
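/*
 * Hedged note (not part of the original file): the three channel IDs above come from the
 * instance's devicetree "dmas" property, which is expected to name one entry each for
 * "rx", "tx" and "xor". Judging by the DMAXBAR assertions further down (CH5SEL, CH6SEL and
 * CH7SEL), a matching node would look roughly like this, with the concrete channel numbers
 * being an illustrative assumption:
 *
 *     dmas = <&dma 5>, <&dma 6>, <&dma 7>;
 *     dma-names = "tx", "rx", "xor";
 *
 * The BUILD_ASSERTs only require each ID to be smaller than DMA_CHANNEL_COUNT.
 */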
	uint32_t current_ctr; /* only used for AES-CTR sessions */
static void crypto_si32_dma_completed(const struct device *dev, void *user_data, uint32_t channel,

	switch (channel) {
		LOG_DBG("AES0 RX DMA channel %s", result);
		LOG_DBG("AES0 TX DMA channel %s", result);
		LOG_DBG("AES0 XOR DMA channel %s", result);
		LOG_ERR("Unknown DMA channel number: %d", channel);
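/*
 * Context, as a hedged sketch rather than lines from the file: this handler matches
 * Zephyr's dma_callback_t signature and would be registered through the dma_callback and
 * user_data members of struct dma_config in the per-channel setup helpers below, e.g.
 *
 *     struct dma_config dma_cfg = {
 *             .dma_callback = crypto_si32_dma_completed,
 *             .user_data = NULL,
 *     };
 *
 * The switch above merely logs, per channel (RX, TX or XOR), whether the transfer
 * completed or failed.
 */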
	if (SI32_AES_0->STATUS.ERRI) {
			SI32_AES_0->STATUS.DORF, SI32_AES_0->STATUS.DURF, SI32_AES_0->STATUS.XORF);
	__ASSERT(SI32_AES_0->CONTROL.RESET == 0, "Reset done");

	 * with software mode (SWMDEN bit is set to 1) and not with DMA operations, where the DMA
	 * using the DMA with the AES module.

	/* For peripheral transfers, firmware should configure the peripheral for the DMA transfer
	 * and set the device’s DMA crossbar (DMAXBAR) to map a DMA channel to the peripheral.
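/*
 * Hedged clarification (not in the original): on SiM3 devices the DMA crossbar (DMAXBAR)
 * routes a peripheral's request lines to specific DMA channels, so crypto_si32_init() has
 * to connect the AES0 TX, RX and XOR requests to the channels chosen in the devicetree
 * before any transfer can run. The CH5SEL/CH6SEL/CH7SEL assertions later in this file
 * check exactly that mapping.
 */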
		SI32_AES_0->HWKEY7.U32 = key_as_word[7];
		SI32_AES_0->HWKEY6.U32 = key_as_word[6];
		SI32_AES_0->HWKEY5.U32 = key_as_word[5];
		SI32_AES_0->HWKEY4.U32 = key_as_word[4];
		SI32_AES_0->HWKEY3.U32 = key_as_word[3];
		SI32_AES_0->HWKEY2.U32 = key_as_word[2];
		SI32_AES_0->HWKEY1.U32 = key_as_word[1];
		SI32_AES_0->HWKEY0.U32 = key_as_word[0];

		return -EINVAL;
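/*
 * Illustrative usage (assumption, not shown in the excerpt): key_as_word is the caller's
 * key viewed as 32-bit words, and only as many HWKEYn registers are written as the key
 * length requires, e.g. HWKEY0..HWKEY3 for the AES-128 case this driver supports:
 *
 *     uint8_t key[16];
 *     ret = crypto_si32_aes_set_key(key, sizeof(key));
 *
 * Lengths other than 16, 24 or 32 bytes presumably take the -EINVAL path above.
 */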
	ret = crypto_si32_aes_set_key(ctx->key.bit_stream, ctx->keylen);

	switch (ctx->keylen) {
		decryption_key_word[7] = SI32_AES_0->HWKEY7.U32;
		decryption_key_word[6] = SI32_AES_0->HWKEY6.U32;
		decryption_key_word[5] = SI32_AES_0->HWKEY5.U32;
		decryption_key_word[4] = SI32_AES_0->HWKEY4.U32;
		decryption_key_word[3] = SI32_AES_0->HWKEY3.U32;
		decryption_key_word[2] = SI32_AES_0->HWKEY2.U32;
		decryption_key_word[1] = SI32_AES_0->HWKEY1.U32;
		decryption_key_word[0] = SI32_AES_0->HWKEY0.U32;
		LOG_ERR("Invalid key len: %" PRIu16, ctx->keylen);
		return -EINVAL;
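/*
 * Background, hedged (inferred from the register reads above, not quoted from the file):
 * ECB and CBC decryption need the inverse (final-round) key, so
 * crypto_si32_aes_calc_decryption_key() loads the encryption key, lets the AES block
 * transform it by running an encryption pass, and reads the result back from
 * HWKEY0..HWKEY7. The value is cached in session->decryption_key and reused by the
 * decrypt handlers selected in crypto_si32_begin_session().
 */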
	switch (ctx->keylen) {
		LOG_ERR("Invalid key len: %" PRIu16, ctx->keylen);
		return -EINVAL;
	__ASSERT(channel_descriptor->CONFIG.SRCSIZE == 2,
	__ASSERT(channel_descriptor->CONFIG.DSTSIZE == 2,
	__ASSERT(channel_descriptor->CONFIG.RPOWER == 2,
	__ASSERT(channel_descriptor->SRCEND.U32 == (uintptr_t)&SI32_AES_0->DATAFIFO,
	__ASSERT(channel_descriptor->CONFIG.DSTAIMD == 0b10,
	__ASSERT(channel_descriptor->CONFIG.SRCAIMD == 0b11,
	__ASSERT(channel_descriptor->DSTEND.U32 == (uintptr_t)&SI32_AES_0->DATAFIFO,
	__ASSERT(channel_descriptor->CONFIG.DSTAIMD == 0b11,
	__ASSERT(channel_descriptor->CONFIG.SRCAIMD == 0b10,
	__ASSERT(channel_descriptor->DSTEND.U32 == (uintptr_t)&SI32_AES_0->XORFIFO,
	__ASSERT(channel_descriptor->CONFIG.DSTAIMD == 0b11,
	__ASSERT(channel_descriptor->CONFIG.SRCAIMD == 0b10,
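/*
 * Hedged reading of the asserted descriptor fields (SiM3 uDMA encoding): SRCSIZE/DSTSIZE
 * of 2 selects word (4-byte) transfers, and RPOWER of 2 re-arbitrates after 2^2 = 4
 * transfers, i.e. one 16-byte AES block per DMA request. An AIMD value of 0b10 means
 * word-wise address increments on the memory side, while 0b11 keeps the address fixed on
 * the DATAFIFO/XORFIFO peripheral side.
 */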
/* Set up and start input (TX) DMA channel */
	const struct device *dma = DEVICE_DT_GET(DT_NODELABEL(dma));

	if (!pkt->in_len) {
		LOG_WRN("Zero-sized data");

	if (pkt->in_len % 16) {
		LOG_ERR("Data size must be 4-word aligned");
		return -EINVAL;

	dma_block_cfg.block_size = pkt->in_len - in_buf_offset;
	dma_block_cfg.source_address = (uintptr_t)pkt->in_buf + in_buf_offset;
	dma_block_cfg.dest_address = (uintptr_t)&SI32_AES_0->DATAFIFO;

	/* Stop channel to ensure we are not messing with an ongoing DMA operation */
	ret = dma_stop(dma, DMA_CHANNEL_ID_TX);
		LOG_ERR("TX DMA channel stop failed: %d", ret);

	ret = dma_config(dma, DMA_CHANNEL_ID_TX, &dma_cfg);
		LOG_ERR("TX DMA channel setup failed: %d", ret);

	ret = dma_start(dma, DMA_CHANNEL_ID_TX);
		LOG_ERR("TX DMA channel start failed: %d", ret);

		(struct SI32_DMADESC_A_Struct *)SI32_DMACTRL_0->BASEPTR.U32;

	/* Verify 12.5.2. General DMA Transfer Setup */
		 "The channel request mask (CHREQMCLR) must be cleared for the channel to "
	__ASSERT(SI32_DMAXBAR_0->DMAXBAR0.CH5SEL == 0b0001,
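/*
 * Minimal sketch (assumption; these field values are not shown in the excerpt) of how the
 * rest of the TX channel configuration could look with Zephyr's DMA API, consistent with
 * the word-sized, 4-word-burst descriptor checks above:
 *
 *     struct dma_config dma_cfg = {
 *             .channel_direction = MEMORY_TO_PERIPHERAL,
 *             .source_data_size = 4,
 *             .dest_data_size = 4,
 *             .source_burst_length = 16,
 *             .dest_burst_length = 16,
 *             .block_count = 1,
 *             .head_block = &dma_block_cfg,
 *             .dma_callback = crypto_si32_dma_completed,
 *     };
 */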
/* Set up and start output (RX) DMA channel */
	const struct device *dma = DEVICE_DT_GET(DT_NODELABEL(dma));

	if (!pkt->in_len) {
		LOG_WRN("Zero-sized data");

	if (pkt->in_len % 16) {
		LOG_ERR("Data size must be 4-word aligned");
		return -EINVAL;

	/* A NULL out_buf indicates an in-place operation. */
	if (pkt->out_buf == NULL) {
		dest_address = (uintptr_t)pkt->in_buf;

		if ((pkt->out_buf_max - out_buf_offset) < (pkt->in_len - in_buf_offset)) {
			return -ENOMEM;

		dest_address = (uintptr_t)(pkt->out_buf + out_buf_offset);

	/* Set up output (RX) DMA channel */
	dma_block_cfg.block_size = pkt->in_len - in_buf_offset;
	dma_block_cfg.source_address = (uintptr_t)&SI32_AES_0->DATAFIFO;

	/* Stop channel to ensure we are not messing with an ongoing DMA operation */
	ret = dma_stop(dma, DMA_CHANNEL_ID_RX);
		LOG_ERR("RX DMA channel stop failed: %d", ret);

	ret = dma_config(dma, DMA_CHANNEL_ID_RX, &dma_cfg);
		LOG_ERR("RX DMA channel setup failed: %d", ret);

	ret = dma_start(dma, DMA_CHANNEL_ID_RX);
		LOG_ERR("RX DMA channel start failed: %d", ret);

		(struct SI32_DMADESC_A_Struct *)SI32_DMACTRL_0->BASEPTR.U32;

	/* As per 12.5.2. General DMA Transfer Setup, check input and output channel
		 "The channel request mask (CHREQMCLR) must be cleared for the channel to "
	__ASSERT(SI32_DMAXBAR_0->DMAXBAR0.CH6SEL == 0b0001,
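/*
 * Hedged note: the RX (output) channel mirrors the TX setup but moves data from the AES0
 * DATAFIFO back into memory (PERIPHERAL_TO_MEMORY in Zephyr terms). With out_buf == NULL
 * the result is written over in_buf, which is what makes CAP_INPLACE_OPS work without a
 * second buffer; otherwise the destination is out_buf plus any IV-prefix offset, and the
 * remaining space is checked before the channel is armed.
 */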
/* Set up and start XOR DMA channel */
	const struct device *dma = DEVICE_DT_GET(DT_NODELABEL(dma));

	if (!pkt->in_len) {
		LOG_WRN("Zero-sized data");

	if (pkt->in_len % 16) {
		LOG_ERR("Data size must be 4-word aligned");
		return -EINVAL;

	dma_block_cfg.block_size = pkt->in_len;
	dma_block_cfg.source_address = (uintptr_t)pkt->in_buf;
	dma_block_cfg.dest_address = (uintptr_t)&SI32_AES_0->XORFIFO;

	/* Stop channel to ensure we are not messing with an ongoing DMA operation */
	ret = dma_stop(dma, DMA_CHANNEL_ID_XOR);
		LOG_ERR("XOR DMA channel stop failed: %d", ret);

	ret = dma_config(dma, DMA_CHANNEL_ID_XOR, &dma_cfg);
		LOG_ERR("XOR DMA channel setup failed: %d", ret);

	ret = dma_start(dma, DMA_CHANNEL_ID_XOR);
		LOG_ERR("XOR DMA channel start failed: %d", ret);

		(struct SI32_DMADESC_A_Struct *)SI32_DMACTRL_0->BASEPTR.U32;

	/* As per 12.5.2. General DMA Transfer Setup, check input and output channel
		 "The channel request mask (CHREQMCLR) must be cleared for the channel to "
	__ASSERT(SI32_DMAXBAR_0->DMAXBAR0.CH7SEL == 0b0001,
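/*
 * Hedged note: the XOR channel is only needed for CTR mode. The counter blocks are
 * encrypted by the AES engine itself, and the plaintext or ciphertext is streamed into
 * XORFIFO so the hardware XORs it with the keystream on the fly; that is why
 * crypto_si32_aes_ctr_op() below only sets up the output (RX) and XOR channels, with no
 * TX channel.
 */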
	return -EINVAL;

	session = (struct crypto_session *)ctx->drv_sessn_state;

	return -EINVAL;

	if (pkt->in_len % 16) {
		return -EINVAL;

	if (pkt->in_len > 16) {
		return -EINVAL;

	if (pkt->in_len == 0) {
		LOG_DBG("Zero-sized packet");

	if ((ctx->flags & CAP_INPLACE_OPS) && (pkt->out_buf != NULL)) {
		LOG_ERR("In-place must not have an out_buf");
		return -EINVAL;

	/* As per 12.6.1./12.6.2. Configuring the DMA for ECB Encryption/Decryption */

	/* DMA Input Channel */

	/* DMA Output Channel */

	/* 1. The XFRSIZE register should be set to N-1, where N is the number of 4-word blocks. */
	SI32_AES_A_write_xfrsize(SI32_AES_0, pkt->in_len / AES_BLOCK_SIZE - 1);

	ret = crypto_si32_aes_set_key(ctx->key.bit_stream, ctx->keylen);

	ret = crypto_si32_aes_set_key(session->decryption_key, ctx->keylen);

	return -ENOSYS;

	__ASSERT(SI32_AES_0->CONTROL.ERRIEN == 1, "a. ERRIEN set to 1.");

	return -ENOSYS;

	/* Once the DMA and AES settings have been set, the transfer should be started by writing 1

	return -EIO;

	pkt->out_len = pkt->in_len;
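/*
 * Illustrative caller-side sketch (assumption, not from this file), using Zephyr's crypto
 * API to drive the single-block ECB path above. With in_len == 16 the XFRSIZE write
 * becomes 16 / AES_BLOCK_SIZE - 1 = 0, i.e. one 4-word block:
 *
 *     struct cipher_ctx ctx = {
 *             .keylen = 16,
 *             .key.bit_stream = key,
 *             .flags = CAP_RAW_KEY | CAP_SYNC_OPS | CAP_SEPARATE_IO_BUFS,
 *     };
 *     struct cipher_pkt pkt = {
 *             .in_buf = plaintext, .in_len = 16,
 *             .out_buf = ciphertext, .out_buf_max = 16,
 *     };
 *
 *     cipher_begin_session(dev, &ctx, CRYPTO_CIPHER_ALGO_AES, CRYPTO_CIPHER_MODE_ECB,
 *                          CRYPTO_CIPHER_OP_ENCRYPT);
 *     cipher_block_op(&ctx, &pkt);
 *     cipher_free_session(dev, &ctx);
 */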
	return -EINVAL;

	session = (struct crypto_session *)ctx->drv_sessn_state;

	return -EINVAL;

	if (pkt->in_len % 16) {
		return -EINVAL;

	if (pkt->in_len == 0) {
		LOG_WRN("Zero-sized packet");

	if ((ctx->flags & CAP_NO_IV_PREFIX) == 0U) {
		if (pkt->out_buf_max < 16) {
			return -ENOMEM;

		if (!pkt->out_buf) {
			return -EINVAL;

		memcpy(pkt->out_buf, iv, 16);

	return -ENOSYS;

	/* As per 12.7.1.1./12.7.1.2. Configuring the DMA for Hardware CBC Encryption/Decryption */

	/* DMA Input Channel */

	/* DMA Output Channel */

	SI32_AES_0->HWCTR0.U32 = *((uint32_t *)iv);
	SI32_AES_0->HWCTR1.U32 = *((uint32_t *)iv + 1);
	SI32_AES_0->HWCTR2.U32 = *((uint32_t *)iv + 2);
	SI32_AES_0->HWCTR3.U32 = *((uint32_t *)iv + 3);

	/* 1. The XFRSIZE register should be set to N-1, where N is the number of 4-word blocks. */
	SI32_AES_A_write_xfrsize(SI32_AES_0, (pkt->in_len - in_buf_offset) / AES_BLOCK_SIZE - 1);

	ret = crypto_si32_aes_set_key(ctx->key.bit_stream, ctx->keylen);

	ret = crypto_si32_aes_set_key(session->decryption_key, ctx->keylen);

	return -ENOSYS;

	__ASSERT(SI32_AES_0->CONTROL.ERRIEN == 1, "a. ERRIEN set to 1.");

	return -ENOSYS;

	/* Once the DMA and AES settings have been set, the transfer should be started by writing 1

	return -EIO;

	*((uint32_t *)iv) = SI32_AES_0->HWCTR0.U32;
	*((uint32_t *)iv + 1) = SI32_AES_0->HWCTR1.U32;
	*((uint32_t *)iv + 2) = SI32_AES_0->HWCTR2.U32;
	*((uint32_t *)iv + 3) = SI32_AES_0->HWCTR3.U32;

	pkt->out_len = pkt->in_len - in_buf_offset + out_buf_offset;
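/*
 * Illustrative caller-side sketch for CBC (assumption, not from this file). Without
 * CAP_NO_IV_PREFIX the driver first copies the 16-byte IV into out_buf (see the memcpy
 * above), so for encryption out_buf must hold the IV plus the ciphertext and out_len
 * presumably ends up 16 bytes larger than in_len:
 *
 *     uint8_t iv[16];
 *     struct cipher_pkt pkt = {
 *             .in_buf = plaintext, .in_len = 64,
 *             .out_buf = out, .out_buf_max = sizeof(out),  // >= 64 + 16 here
 *     };
 *
 *     cipher_begin_session(dev, &ctx, CRYPTO_CIPHER_ALGO_AES, CRYPTO_CIPHER_MODE_CBC,
 *                          CRYPTO_CIPHER_OP_ENCRYPT);
 *     cipher_cbc_op(&ctx, &pkt, iv);
 *
 * The HWCTR readback above appears to keep iv updated so a follow-up call can continue
 * the chain.
 */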
	return -EINVAL;

	session = (struct crypto_session *)ctx->drv_sessn_state;

	return -EINVAL;

	if (pkt->in_len % 16) {
		return -EINVAL;

	if (pkt->in_len == 0) {
		LOG_WRN("Zero-sized packet");

	/* 12.8.1./12.8.2. Configuring the DMA for CTR Encryption/Decryption */

	/* DMA Output Channel */

	/* DMA XOR Channel */

	switch (ctx->mode_params.ctr_info.ctr_len) {
		SI32_AES_0->HWCTR3.U32 = sys_cpu_to_be32(session->current_ctr);
		SI32_AES_0->HWCTR2.U32 = *((uint32_t *)iv + 2);
		SI32_AES_0->HWCTR1.U32 = *((uint32_t *)iv + 1);
		SI32_AES_0->HWCTR0.U32 = *((uint32_t *)iv);
		LOG_ERR("Unsupported counter length: %" PRIu16, ctx->mode_params.ctr_info.ctr_len);
		ret = -ENOSYS;

	/* 1. The XFRSIZE register should be set to N-1, where N is the number of 4-word blocks. */
	SI32_AES_A_write_xfrsize(SI32_AES_0, pkt->in_len / AES_BLOCK_SIZE - 1);

	ret = crypto_si32_aes_set_key(ctx->key.bit_stream, ctx->keylen);

	__ASSERT(SI32_AES_0->CONTROL.ERRIEN == 1, "a. ERRIEN set to 1.");

	/* Once the DMA and AES settings have been set, the transfer should be started by writing 1

	ret = -EIO;

	switch (ctx->mode_params.ctr_info.ctr_len) {
		session->current_ctr = sys_be32_to_cpu(SI32_AES_0->HWCTR3.U32);
		LOG_ERR("Unsupported counter length: %" PRIu16, ctx->mode_params.ctr_info.ctr_len);
		ret = -ENOSYS;

	pkt->out_len = pkt->in_len;
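/*
 * Illustrative caller-side sketch for CTR (assumption, not from this file). Only a 32-bit
 * counter is supported: iv carries the 96-bit nonce loaded into HWCTR0..HWCTR2, the
 * counter itself lives in HWCTR3, and session->current_ctr is read back afterwards so the
 * keystream continues across calls:
 *
 *     ctx.mode_params.ctr_info.ctr_len = 32U;
 *
 *     cipher_begin_session(dev, &ctx, CRYPTO_CIPHER_ALGO_AES, CRYPTO_CIPHER_MODE_CTR,
 *                          CRYPTO_CIPHER_OP_ENCRYPT);
 *     cipher_ctr_op(&ctx, &pkt, iv);
 */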
	return -ENOTSUP;

	if (!(ctx->flags & CAP_SYNC_OPS)) {
		return -ENOTSUP;

	if (ctx->key.bit_stream == NULL) {
		return -EINVAL;

	if (ctx->keylen != 16) {
		LOG_ERR("Only AES-128 implemented");
		return -ENOSYS;

	if (ctx->flags & CAP_INPLACE_OPS && (ctx->flags & CAP_NO_IV_PREFIX) == 0) {
		LOG_ERR("In-place requires no IV prefix");
		return -EINVAL;

	if (ctx->mode_params.ctr_info.ctr_len != 32U) {
		return -ENOSYS;

	ret = -ENOSPC;

		ctx->ops.block_crypt_hndlr = crypto_si32_aes_ecb_encrypt;

		ctx->ops.cbc_crypt_hndlr = crypto_si32_aes_cbc_encrypt;

		ctx->ops.ctr_crypt_hndlr = crypto_si32_aes_ctr_op;
		session->current_ctr = 0;

		ret = -ENOSYS;

		ctx->ops.block_crypt_hndlr = crypto_si32_aes_ecb_decrypt;
		ret = crypto_si32_aes_calc_decryption_key(ctx, session->decryption_key);

		ctx->ops.cbc_crypt_hndlr = crypto_si32_aes_cbc_decrypt;
		ret = crypto_si32_aes_calc_decryption_key(ctx, session->decryption_key);

		ctx->ops.ctr_crypt_hndlr = crypto_si32_aes_ctr_op;
		session->current_ctr = 0;

		ret = -ENOSYS;

		ret = -ENOSYS;

	session->in_use = true;
	ctx->drv_sessn_state = session;
	return -EINVAL;

	struct crypto_session *session = (struct crypto_session *)ctx->drv_sessn_state;

	session->in_use = false;