Lines Matching +full:assoc +full:- +full:select

1 // SPDX-License-Identifier: GPL-2.0
2 /* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */
67 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_aead_exit()
70 crypto_tfm_alg_name(&tfm->base)); in cc_aead_exit()
73 if (ctx->enckey) { in cc_aead_exit()
74 dma_free_coherent(dev, AES_MAX_KEY_SIZE, ctx->enckey, in cc_aead_exit()
75 ctx->enckey_dma_addr); in cc_aead_exit()
77 &ctx->enckey_dma_addr); in cc_aead_exit()
78 ctx->enckey_dma_addr = 0; in cc_aead_exit()
79 ctx->enckey = NULL; in cc_aead_exit()
82 if (ctx->auth_mode == DRV_HASH_XCBC_MAC) { /* XCBC authentication */ in cc_aead_exit()
83 struct cc_xcbc_s *xcbc = &ctx->auth_state.xcbc; in cc_aead_exit()
85 if (xcbc->xcbc_keys) { in cc_aead_exit()
87 xcbc->xcbc_keys, in cc_aead_exit()
88 xcbc->xcbc_keys_dma_addr); in cc_aead_exit()
91 &xcbc->xcbc_keys_dma_addr); in cc_aead_exit()
92 xcbc->xcbc_keys_dma_addr = 0; in cc_aead_exit()
93 xcbc->xcbc_keys = NULL; in cc_aead_exit()
94 } else if (ctx->auth_mode != DRV_HASH_NULL) { /* HMAC auth. */ in cc_aead_exit()
95 struct cc_hmac_s *hmac = &ctx->auth_state.hmac; in cc_aead_exit()
97 if (hmac->ipad_opad) { in cc_aead_exit()
99 hmac->ipad_opad, in cc_aead_exit()
100 hmac->ipad_opad_dma_addr); in cc_aead_exit()
102 &hmac->ipad_opad_dma_addr); in cc_aead_exit()
103 hmac->ipad_opad_dma_addr = 0; in cc_aead_exit()
104 hmac->ipad_opad = NULL; in cc_aead_exit()
106 if (hmac->padded_authkey) { in cc_aead_exit()
108 hmac->padded_authkey, in cc_aead_exit()
109 hmac->padded_authkey_dma_addr); in cc_aead_exit()
111 &hmac->padded_authkey_dma_addr); in cc_aead_exit()
112 hmac->padded_authkey_dma_addr = 0; in cc_aead_exit()
113 hmac->padded_authkey = NULL; in cc_aead_exit()
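
The teardown above pairs each dma_alloc_coherent() from cc_aead_init() with a dma_free_coherent(), then poisons both the DMA handle and the CPU pointer so a double free or use-after-free is easy to spot. A minimal kernel-style sketch of that free-and-poison pattern, with a hypothetical context struct and buffer size standing in for the driver's real fields:

#include <linux/dma-mapping.h>

#define MY_BUF_SIZE 64                /* hypothetical buffer size */

struct my_ctx {                       /* hypothetical context */
        u8 *buf;
        dma_addr_t buf_dma;
};

/* Sketch only: mirrors the free/zero/NULL sequence in cc_aead_exit(). */
static void my_ctx_free_buf(struct device *dev, struct my_ctx *ctx)
{
        if (!ctx->buf)
                return;               /* nothing allocated, nothing to do */

        dma_free_coherent(dev, MY_BUF_SIZE, ctx->buf, ctx->buf_dma);
        ctx->buf_dma = 0;             /* invalidate the stale DMA handle */
        ctx->buf = NULL;              /* guard against a double free */
}
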
122 return cc_get_default_hash_len(ctx->drvdata); in cc_get_aead_hash_len()
131 struct device *dev = drvdata_to_dev(cc_alg->drvdata); in cc_aead_init()
134 crypto_tfm_alg_name(&tfm->base)); in cc_aead_init()
137 ctx->cipher_mode = cc_alg->cipher_mode; in cc_aead_init()
138 ctx->flow_mode = cc_alg->flow_mode; in cc_aead_init()
139 ctx->auth_mode = cc_alg->auth_mode; in cc_aead_init()
140 ctx->drvdata = cc_alg->drvdata; in cc_aead_init()
144 ctx->enckey = dma_alloc_coherent(dev, AES_MAX_KEY_SIZE, in cc_aead_init()
145 &ctx->enckey_dma_addr, GFP_KERNEL); in cc_aead_init()
146 if (!ctx->enckey) { in cc_aead_init()
150 dev_dbg(dev, "Allocated enckey buffer in context ctx->enckey=@%p\n", in cc_aead_init()
151 ctx->enckey); in cc_aead_init()
155 if (ctx->auth_mode == DRV_HASH_XCBC_MAC) { /* XCBC authentication */ in cc_aead_init()
156 struct cc_xcbc_s *xcbc = &ctx->auth_state.xcbc; in cc_aead_init()
159 /* Allocate dma-coherent buffer for XCBC's K1+K2+K3 */ in cc_aead_init()
160 /* (and temporary storage for the user key - up to 256 bits) */ in cc_aead_init()
161 xcbc->xcbc_keys = dma_alloc_coherent(dev, key_size, in cc_aead_init()
162 &xcbc->xcbc_keys_dma_addr, in cc_aead_init()
164 if (!xcbc->xcbc_keys) { in cc_aead_init()
168 } else if (ctx->auth_mode != DRV_HASH_NULL) { /* HMAC authentication */ in cc_aead_init()
169 struct cc_hmac_s *hmac = &ctx->auth_state.hmac; in cc_aead_init()
171 dma_addr_t *pkey_dma = &hmac->padded_authkey_dma_addr; in cc_aead_init()
173 /* Allocate dma-coherent buffer for IPAD + OPAD */ in cc_aead_init()
174 hmac->ipad_opad = dma_alloc_coherent(dev, digest_size, in cc_aead_init()
175 &hmac->ipad_opad_dma_addr, in cc_aead_init()
178 if (!hmac->ipad_opad) { in cc_aead_init()
183 dev_dbg(dev, "Allocated authkey buffer in context ctx->authkey=@%p\n", in cc_aead_init()
184 hmac->ipad_opad); in cc_aead_init()
186 hmac->padded_authkey = dma_alloc_coherent(dev, in cc_aead_init()
191 if (!hmac->padded_authkey) { in cc_aead_init()
196 ctx->auth_state.hmac.ipad_opad = NULL; in cc_aead_init()
197 ctx->auth_state.hmac.padded_authkey = NULL; in cc_aead_init()
199 ctx->hash_len = cc_get_aead_hash_len(tfm); in cc_aead_init()
205 return -ENOMEM; in cc_aead_init()
216 if (err == -EINPROGRESS) in cc_aead_complete()
222 areq->iv = areq_ctx->backup_iv; in cc_aead_complete()
227 if (areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) { in cc_aead_complete()
228 if (memcmp(areq_ctx->mac_buf, areq_ctx->icv_virt_addr, in cc_aead_complete()
229 ctx->authsize) != 0) { in cc_aead_complete()
230 dev_dbg(dev, "Payload authentication failure (auth-size=%d, cipher=%d)\n", in cc_aead_complete()
231 ctx->authsize, ctx->cipher_mode); in cc_aead_complete()
233 * reveal the decrypted message --> zero its memory. in cc_aead_complete()
235 sg_zero_buffer(areq->dst, sg_nents(areq->dst), in cc_aead_complete()
236 areq->cryptlen, areq->assoclen); in cc_aead_complete()
237 err = -EBADMSG; in cc_aead_complete()
240 } else if (areq_ctx->is_icv_fragmented) { in cc_aead_complete()
241 u32 skip = areq->cryptlen + areq_ctx->dst_offset; in cc_aead_complete()
243 cc_copy_sg_portion(dev, areq_ctx->mac_buf, areq_ctx->dst_sgl, in cc_aead_complete()
244 skip, (skip + ctx->authsize), in cc_aead_complete()
261 ctx->auth_state.xcbc.xcbc_keys_dma_addr, ctx->auth_keylen, in xcbc_setkey()
265 set_key_size_aes(&desc[0], ctx->auth_keylen); in xcbc_setkey()
272 set_dout_dlli(&desc[1], ctx->auth_state.xcbc.xcbc_keys_dma_addr, in xcbc_setkey()
278 set_dout_dlli(&desc[2], (ctx->auth_state.xcbc.xcbc_keys_dma_addr in xcbc_setkey()
285 set_dout_dlli(&desc[3], (ctx->auth_state.xcbc.xcbc_keys_dma_addr in xcbc_setkey()
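
The descriptor chain in xcbc_setkey() implements the AES-XCBC-MAC key derivation of RFC 3566: the three subkeys written back into xcbc_keys are the AES encryptions of the constant blocks 0x01..01, 0x02..02 and 0x03..03 under the user key. A sketch of the same derivation in software, assuming a hypothetical one-block primitive aes_encrypt_block():

#include <stddef.h>
#include <string.h>

/* Hypothetical one-block AES primitive; any AES library would do. */
void aes_encrypt_block(const unsigned char *key, size_t keylen,
                       const unsigned char in[16], unsigned char out[16]);

/* Sketch: derive the RFC 3566 subkeys K1 = E_K(0x01..), K2 = E_K(0x02..),
 * K3 = E_K(0x03..), as the descriptor chain above does in hardware.
 */
static void xcbc_derive_keys(const unsigned char *key, size_t keylen,
                             unsigned char k1[16], unsigned char k2[16],
                             unsigned char k3[16])
{
        unsigned char block[16];

        memset(block, 0x01, sizeof(block));
        aes_encrypt_block(key, keylen, block, k1);
        memset(block, 0x02, sizeof(block));
        aes_encrypt_block(key, keylen, block, k2);
        memset(block, 0x03, sizeof(block));
        aes_encrypt_block(key, keylen, block, k3);
}
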
297 unsigned int hash_mode = (ctx->auth_mode == DRV_HASH_SHA1) ? in hmac_setkey()
299 unsigned int digest_size = (ctx->auth_mode == DRV_HASH_SHA1) ? in hmac_setkey()
301 struct cc_hmac_s *hmac = &ctx->auth_state.hmac; in hmac_setkey()
312 cc_larval_digest_addr(ctx->drvdata, in hmac_setkey()
313 ctx->auth_mode), in hmac_setkey()
322 set_din_const(&desc[idx], 0, ctx->hash_len); in hmac_setkey()
338 hmac->padded_authkey_dma_addr, in hmac_setkey()
349 (hmac->ipad_opad_dma_addr + digest_ofs), in hmac_setkey()
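
hmac_setkey() precomputes the HMAC inner and outer hash states: the block-sized key is XORed with the RFC 2104 ipad (0x36) and opad (0x5c) bytes, each result is hashed for one block, and the two intermediate digests are stored at ipad_opad_dma_addr for reuse on every request. The XOR-pad step itself is sketched below; padded_key is assumed to already be zero-padded to the hash block size:

#include <stddef.h>

#define HMAC_IPAD 0x36   /* RFC 2104 inner pad byte */
#define HMAC_OPAD 0x5c   /* RFC 2104 outer pad byte */

/* Sketch: build the inner/outer pad blocks from a block-sized key. */
static void hmac_make_pads(const unsigned char *padded_key, size_t blocksize,
                           unsigned char *ipad_block, unsigned char *opad_block)
{
        size_t i;

        for (i = 0; i < blocksize; i++) {
                ipad_block[i] = padded_key[i] ^ HMAC_IPAD;
                opad_block[i] = padded_key[i] ^ HMAC_OPAD;
        }
}
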
364 struct device *dev = drvdata_to_dev(ctx->drvdata); in validate_keys_sizes()
367 ctx->enc_keylen, ctx->auth_keylen); in validate_keys_sizes()
369 switch (ctx->auth_mode) { in validate_keys_sizes()
374 if (ctx->auth_keylen != AES_KEYSIZE_128 && in validate_keys_sizes()
375 ctx->auth_keylen != AES_KEYSIZE_192 && in validate_keys_sizes()
376 ctx->auth_keylen != AES_KEYSIZE_256) in validate_keys_sizes()
377 return -ENOTSUPP; in validate_keys_sizes()
379 case DRV_HASH_NULL: /* Not authenc (e.g., CCM) - no auth_key */ in validate_keys_sizes()
380 if (ctx->auth_keylen > 0) in validate_keys_sizes()
381 return -EINVAL; in validate_keys_sizes()
384 dev_dbg(dev, "Invalid auth_mode=%d\n", ctx->auth_mode); in validate_keys_sizes()
385 return -EINVAL; in validate_keys_sizes()
388 if (ctx->flow_mode == S_DIN_to_DES) { in validate_keys_sizes()
389 if (ctx->enc_keylen != DES3_EDE_KEY_SIZE) { in validate_keys_sizes()
391 ctx->enc_keylen); in validate_keys_sizes()
392 return -EINVAL; in validate_keys_sizes()
395 if (ctx->enc_keylen != AES_KEYSIZE_128 && in validate_keys_sizes()
396 ctx->enc_keylen != AES_KEYSIZE_192 && in validate_keys_sizes()
397 ctx->enc_keylen != AES_KEYSIZE_256) { in validate_keys_sizes()
399 ctx->enc_keylen); in validate_keys_sizes()
400 return -EINVAL; in validate_keys_sizes()
415 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_get_plain_hmac_key()
426 ctx->auth_state.hmac.padded_authkey_dma_addr; in cc_get_plain_hmac_key()
428 switch (ctx->auth_mode) { /* auth_key required and >0 */ in cc_get_plain_hmac_key()
445 return -ENOMEM; in cc_get_plain_hmac_key()
452 return -ENOMEM; in cc_get_plain_hmac_key()
458 larval_addr = cc_larval_digest_addr(ctx->drvdata, in cc_get_plain_hmac_key()
459 ctx->auth_mode); in cc_get_plain_hmac_key()
468 set_din_const(&desc[idx], 0, ctx->hash_len); in cc_get_plain_hmac_key()
493 set_din_const(&desc[idx], 0, (blocksize - digestsize)); in cc_get_plain_hmac_key()
496 digestsize), (blocksize - digestsize), in cc_get_plain_hmac_key()
508 if ((blocksize - keylen) != 0) { in cc_get_plain_hmac_key()
511 (blocksize - keylen)); in cc_get_plain_hmac_key()
516 (blocksize - keylen), NS_BIT, 0); in cc_get_plain_hmac_key()
522 set_din_const(&desc[idx], 0, (blocksize - keylen)); in cc_get_plain_hmac_key()
529 rc = cc_send_sync_request(ctx->drvdata, &cc_req, desc, idx); in cc_get_plain_hmac_key()
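
Before that pad step, cc_get_plain_hmac_key() canonicalizes the user key per RFC 2104, which is what the (blocksize - keylen) descriptors above arrange: a key longer than the hash block size is first digested, and the (possibly shortened) key is then zero-padded to exactly one block at padded_authkey. A sketch with a hypothetical digest callback standing in for the hardware hash:

#include <string.h>

/* Sketch: reduce an HMAC key to exactly one hash block, as in RFC 2104.
 * 'digest' is a hypothetical callback producing digestsize bytes.
 */
static void hmac_canonicalize_key(const unsigned char *key, size_t keylen,
                                  size_t blocksize, size_t digestsize,
                                  void (*digest)(const unsigned char *, size_t,
                                                 unsigned char *),
                                  unsigned char *padded_key /* blocksize bytes */)
{
        memset(padded_key, 0, blocksize);        /* zero pad up front */

        if (keylen > blocksize)
                digest(key, keylen, padded_key); /* K' = H(K), digestsize bytes */
        else
                memcpy(padded_key, key, keylen); /* K' = K, zero-padded */
}
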
548 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_aead_setkey()
557 if (ctx->auth_mode != DRV_HASH_NULL) { /* authenc() alg. */ in cc_aead_setkey()
565 ctx->enc_keylen = keys.enckeylen; in cc_aead_setkey()
566 ctx->auth_keylen = keys.authkeylen; in cc_aead_setkey()
568 if (ctx->cipher_mode == DRV_CIPHER_CTR) { in cc_aead_setkey()
570 if (ctx->enc_keylen < in cc_aead_setkey()
572 return -EINVAL; in cc_aead_setkey()
576 memcpy(ctx->ctr_nonce, enckey + ctx->enc_keylen - in cc_aead_setkey()
579 ctx->enc_keylen -= CTR_RFC3686_NONCE_SIZE; in cc_aead_setkey()
581 } else { /* non-authenc - has just one key */ in cc_aead_setkey()
584 ctx->enc_keylen = keylen; in cc_aead_setkey()
585 ctx->auth_keylen = 0; in cc_aead_setkey()
595 memcpy(ctx->enckey, enckey, ctx->enc_keylen); in cc_aead_setkey()
596 if (ctx->enc_keylen == 24) in cc_aead_setkey()
597 memset(ctx->enckey + 24, 0, CC_AES_KEY_SIZE_MAX - 24); in cc_aead_setkey()
598 if (ctx->auth_mode == DRV_HASH_XCBC_MAC) { in cc_aead_setkey()
599 memcpy(ctx->auth_state.xcbc.xcbc_keys, authkey, in cc_aead_setkey()
600 ctx->auth_keylen); in cc_aead_setkey()
601 } else if (ctx->auth_mode != DRV_HASH_NULL) { /* HMAC */ in cc_aead_setkey()
602 rc = cc_get_plain_hmac_key(tfm, authkey, ctx->auth_keylen); in cc_aead_setkey()
609 switch (ctx->auth_mode) { in cc_aead_setkey()
617 case DRV_HASH_NULL: /* non-authenc modes, e.g., CCM */ in cc_aead_setkey()
620 dev_err(dev, "Unsupported authenc (%d)\n", ctx->auth_mode); in cc_aead_setkey()
621 return -ENOTSUPP; in cc_aead_setkey()
627 rc = cc_send_sync_request(ctx->drvdata, &cc_req, desc, seq_len); in cc_aead_setkey()
661 return -EINVAL; in cc_rfc4309_ccm_setkey()
663 keylen -= 3; in cc_rfc4309_ccm_setkey()
664 memcpy(ctx->ctr_nonce, key + keylen, 3); in cc_rfc4309_ccm_setkey()
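
cc_rfc4309_ccm_setkey() shows a convention the rfc3686, rfc4106 and rfc4543 setkey paths share: the wire-format key carries an implicit salt at its tail (3 bytes for RFC 4309 CCM, 4 bytes for RFC 3686 CTR and RFC 4106/4543 GCM), split off into ctx->ctr_nonce before the remaining prefix is used as the cipher key. A runnable sketch of the split:

#include <errno.h>
#include <stddef.h>
#include <string.h>

/* Sketch: split "key material || trailing salt" as the rfc4309/rfc3686/
 * rfc4106 setkey paths do. saltlen is 3 (RFC 4309) or 4 (RFC 3686/4106).
 */
static int split_key_and_salt(const unsigned char *key, size_t keylen,
                              size_t saltlen, unsigned char *salt,
                              size_t *cipher_keylen)
{
        if (keylen < saltlen)
                return -EINVAL;               /* key too short to carry salt */

        *cipher_keylen = keylen - saltlen;    /* real key is the prefix */
        memcpy(salt, key + *cipher_keylen, saltlen);
        return 0;
}
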
673 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_aead_setauthsize()
678 return -ENOTSUPP; in cc_aead_setauthsize()
681 ctx->authsize = authsize; in cc_aead_setauthsize()
682 dev_dbg(dev, "authlen=%d\n", ctx->authsize); in cc_aead_setauthsize()
696 return -EINVAL; in cc_rfc4309_ccm_setauthsize()
715 return -EINVAL; in cc_ccm_setauthsize()
727 enum cc_req_dma_buf_type assoc_dma_type = areq_ctx->assoc_buff_type; in cc_set_assoc_desc()
729 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_set_assoc_desc()
733 dev_dbg(dev, "ASSOC buffer type DLLI\n"); in cc_set_assoc_desc()
735 set_din_type(&desc[idx], DMA_DLLI, sg_dma_address(areq->src), in cc_set_assoc_desc()
736 areq_ctx->assoclen, NS_BIT); in cc_set_assoc_desc()
738 if (ctx->auth_mode == DRV_HASH_XCBC_MAC && in cc_set_assoc_desc()
739 areq_ctx->cryptlen > 0) in cc_set_assoc_desc()
743 dev_dbg(dev, "ASSOC buffer type MLLI\n"); in cc_set_assoc_desc()
745 set_din_type(&desc[idx], DMA_MLLI, areq_ctx->assoc.sram_addr, in cc_set_assoc_desc()
746 areq_ctx->assoc.mlli_nents, NS_BIT); in cc_set_assoc_desc()
748 if (ctx->auth_mode == DRV_HASH_XCBC_MAC && in cc_set_assoc_desc()
749 areq_ctx->cryptlen > 0) in cc_set_assoc_desc()
754 dev_err(dev, "Invalid ASSOC buffer type\n"); in cc_set_assoc_desc()
766 enum cc_req_dma_buf_type data_dma_type = areq_ctx->data_buff_type; in cc_proc_authen_desc()
770 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_proc_authen_desc()
777 areq_ctx->dst_sgl : areq_ctx->src_sgl; in cc_proc_authen_desc()
781 areq_ctx->dst_offset : areq_ctx->src_offset; in cc_proc_authen_desc()
786 areq_ctx->cryptlen, NS_BIT); in cc_proc_authen_desc()
792 /* DOUBLE-PASS flow (as default) in cc_proc_authen_desc()
793 * assoc. + iv + data compacted into one table in cc_proc_authen_desc()
796 u32 mlli_addr = areq_ctx->assoc.sram_addr; in cc_proc_authen_desc()
797 u32 mlli_nents = areq_ctx->assoc.mlli_nents; in cc_proc_authen_desc()
799 if (areq_ctx->is_single_pass) { in cc_proc_authen_desc()
801 mlli_addr = areq_ctx->dst.sram_addr; in cc_proc_authen_desc()
802 mlli_nents = areq_ctx->dst.mlli_nents; in cc_proc_authen_desc()
804 mlli_addr = areq_ctx->src.sram_addr; in cc_proc_authen_desc()
805 mlli_nents = areq_ctx->src.mlli_nents; in cc_proc_authen_desc()
831 enum cc_req_dma_buf_type data_dma_type = areq_ctx->data_buff_type; in cc_proc_cipher_desc()
834 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_proc_cipher_desc()
836 if (areq_ctx->cryptlen == 0) in cc_proc_cipher_desc()
844 (sg_dma_address(areq_ctx->src_sgl) + in cc_proc_cipher_desc()
845 areq_ctx->src_offset), areq_ctx->cryptlen, in cc_proc_cipher_desc()
848 (sg_dma_address(areq_ctx->dst_sgl) + in cc_proc_cipher_desc()
849 areq_ctx->dst_offset), in cc_proc_cipher_desc()
850 areq_ctx->cryptlen, NS_BIT, 0); in cc_proc_cipher_desc()
856 set_din_type(&desc[idx], DMA_MLLI, areq_ctx->src.sram_addr, in cc_proc_cipher_desc()
857 areq_ctx->src.mlli_nents, NS_BIT); in cc_proc_cipher_desc()
858 set_dout_mlli(&desc[idx], areq_ctx->dst.sram_addr, in cc_proc_cipher_desc()
859 areq_ctx->dst.mlli_nents, NS_BIT, 0); in cc_proc_cipher_desc()
878 unsigned int hash_mode = (ctx->auth_mode == DRV_HASH_SHA1) ? in cc_proc_digest_desc()
880 int direct = req_ctx->gen_ctx.op_type; in cc_proc_digest_desc()
887 set_dout_dlli(&desc[idx], req_ctx->icv_dma_addr, ctx->authsize, in cc_proc_digest_desc()
889 set_queue_last_ind(ctx->drvdata, &desc[idx]); in cc_proc_digest_desc()
890 if (ctx->auth_mode == DRV_HASH_XCBC_MAC) { in cc_proc_digest_desc()
903 set_dout_dlli(&desc[idx], req_ctx->mac_buf_dma_addr, in cc_proc_digest_desc()
904 ctx->authsize, NS_BIT, 1); in cc_proc_digest_desc()
905 set_queue_last_ind(ctx->drvdata, &desc[idx]); in cc_proc_digest_desc()
909 if (ctx->auth_mode == DRV_HASH_XCBC_MAC) { in cc_proc_digest_desc()
927 unsigned int hw_iv_size = req_ctx->hw_iv_size; in cc_set_cipher_desc()
929 int direct = req_ctx->gen_ctx.op_type; in cc_set_cipher_desc()
934 set_flow_mode(&desc[idx], ctx->flow_mode); in cc_set_cipher_desc()
935 set_din_type(&desc[idx], DMA_DLLI, req_ctx->gen_ctx.iv_dma_addr, in cc_set_cipher_desc()
937 if (ctx->cipher_mode == DRV_CIPHER_CTR) in cc_set_cipher_desc()
941 set_cipher_mode(&desc[idx], ctx->cipher_mode); in cc_set_cipher_desc()
948 set_flow_mode(&desc[idx], ctx->flow_mode); in cc_set_cipher_desc()
949 if (ctx->flow_mode == S_DIN_to_AES) { in cc_set_cipher_desc()
950 set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr, in cc_set_cipher_desc()
951 ((ctx->enc_keylen == 24) ? CC_AES_KEY_SIZE_MAX : in cc_set_cipher_desc()
952 ctx->enc_keylen), NS_BIT); in cc_set_cipher_desc()
953 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_set_cipher_desc()
955 set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr, in cc_set_cipher_desc()
956 ctx->enc_keylen, NS_BIT); in cc_set_cipher_desc()
957 set_key_size_des(&desc[idx], ctx->enc_keylen); in cc_set_cipher_desc()
959 set_cipher_mode(&desc[idx], ctx->cipher_mode); in cc_set_cipher_desc()
969 int direct = req_ctx->gen_ctx.op_type; in cc_proc_cipher()
972 if (req_ctx->cryptlen == 0) in cc_proc_cipher()
993 unsigned int hash_mode = (ctx->auth_mode == DRV_HASH_SHA1) ? in cc_set_hmac_desc()
995 unsigned int digest_size = (ctx->auth_mode == DRV_HASH_SHA1) ? in cc_set_hmac_desc()
1003 ctx->auth_state.hmac.ipad_opad_dma_addr, digest_size, in cc_set_hmac_desc()
1012 set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, hash_mode), in cc_set_hmac_desc()
1013 ctx->hash_len); in cc_set_hmac_desc()
1042 ctx->auth_state.xcbc.xcbc_keys_dma_addr, in cc_set_xcbc_desc()
1055 (ctx->auth_state.xcbc.xcbc_keys_dma_addr + in cc_set_xcbc_desc()
1068 (ctx->auth_state.xcbc.xcbc_keys_dma_addr + in cc_set_xcbc_desc()
1089 if (areq_ctx->assoclen > 0) in cc_proc_header_desc()
1102 struct cc_aead_handle *aead_handle = ctx->drvdata->aead_handle; in cc_proc_scheme_desc()
1103 unsigned int hash_mode = (ctx->auth_mode == DRV_HASH_SHA1) ? in cc_proc_scheme_desc()
1105 unsigned int digest_size = (ctx->auth_mode == DRV_HASH_SHA1) ? in cc_proc_scheme_desc()
1111 set_dout_sram(&desc[idx], aead_handle->sram_workspace_addr, in cc_proc_scheme_desc()
1112 ctx->hash_len); in cc_proc_scheme_desc()
1120 set_dout_sram(&desc[idx], aead_handle->sram_workspace_addr, in cc_proc_scheme_desc()
1132 (ctx->auth_state.hmac.ipad_opad_dma_addr + digest_size), in cc_proc_scheme_desc()
1141 set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, hash_mode), in cc_proc_scheme_desc()
1142 ctx->hash_len); in cc_proc_scheme_desc()
1150 set_din_sram(&desc[idx], aead_handle->sram_workspace_addr, in cc_proc_scheme_desc()
1164 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_mlli_to_sram()
1166 if ((req_ctx->assoc_buff_type == CC_DMA_BUF_MLLI || in cc_mlli_to_sram()
1167 req_ctx->data_buff_type == CC_DMA_BUF_MLLI || in cc_mlli_to_sram()
1168 !req_ctx->is_single_pass) && req_ctx->mlli_params.mlli_len) { in cc_mlli_to_sram()
1169 dev_dbg(dev, "Copy-to-sram: mlli_dma=%08x, mlli_size=%u\n", in cc_mlli_to_sram()
1170 ctx->drvdata->mlli_sram_addr, in cc_mlli_to_sram()
1171 req_ctx->mlli_params.mlli_len); in cc_mlli_to_sram()
1172 /* Copy MLLI table host-to-sram */ in cc_mlli_to_sram()
1175 req_ctx->mlli_params.mlli_dma_addr, in cc_mlli_to_sram()
1176 req_ctx->mlli_params.mlli_len, NS_BIT); in cc_mlli_to_sram()
1178 ctx->drvdata->mlli_sram_addr, in cc_mlli_to_sram()
1179 req_ctx->mlli_params.mlli_len); in cc_mlli_to_sram()
1216 int direct = req_ctx->gen_ctx.op_type; in cc_hmac_authenc()
1218 cc_get_data_flow(direct, ctx->flow_mode, in cc_hmac_authenc()
1219 req_ctx->is_single_pass); in cc_hmac_authenc()
1221 if (req_ctx->is_single_pass) { in cc_hmac_authenc()
1223 * Single-pass flow in cc_hmac_authenc()
1235 * Double-pass flow in cc_hmac_authenc()
1236 * Fallback for unsupported single-pass modes, in cc_hmac_authenc()
1237 * i.e. using assoc. data of non-word-multiple size in cc_hmac_authenc()
1269 int direct = req_ctx->gen_ctx.op_type; in cc_xcbc_authenc()
1271 cc_get_data_flow(direct, ctx->flow_mode, in cc_xcbc_authenc()
1272 req_ctx->is_single_pass); in cc_xcbc_authenc()
1274 if (req_ctx->is_single_pass) { in cc_xcbc_authenc()
1276 * Single-pass flow in cc_xcbc_authenc()
1287 * Double-pass flow in cc_xcbc_authenc()
1288 * Fallback for unsupported single-pass modes, in cc_xcbc_authenc()
1289 * i.e. using assoc. data of non-word-multiple size in cc_xcbc_authenc()
1316 struct device *dev = drvdata_to_dev(ctx->drvdata); in validate_data_size()
1317 unsigned int assoclen = areq_ctx->assoclen; in validate_data_size()
1319 (req->cryptlen - ctx->authsize) : req->cryptlen; in validate_data_size()
1322 req->cryptlen < ctx->authsize) in validate_data_size()
1325 areq_ctx->is_single_pass = true; /* defaulted to fast flow */ in validate_data_size()
1327 switch (ctx->flow_mode) { in validate_data_size()
1329 if (ctx->cipher_mode == DRV_CIPHER_CBC && in validate_data_size()
1332 if (ctx->cipher_mode == DRV_CIPHER_CCM) in validate_data_size()
1334 if (ctx->cipher_mode == DRV_CIPHER_GCTR) { in validate_data_size()
1335 if (areq_ctx->plaintext_authenticate_only) in validate_data_size()
1336 areq_ctx->is_single_pass = false; in validate_data_size()
1341 areq_ctx->is_single_pass = false; in validate_data_size()
1343 if (ctx->cipher_mode == DRV_CIPHER_CTR && in validate_data_size()
1345 areq_ctx->is_single_pass = false; in validate_data_size()
1352 areq_ctx->is_single_pass = false; in validate_data_size()
1355 dev_err(dev, "Unexpected flow mode (%d)\n", ctx->flow_mode); in validate_data_size()
1362 return -EINVAL; in validate_data_size()
1372 if (header_size < ((1UL << 16) - (1UL << 8))) { in format_ccm_a0()
1401 return -EOVERFLOW; in set_msg_len()
1404 memcpy(block - csize, (u8 *)&data + 4 - csize, csize); in set_msg_len()
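
set_msg_len() writes the message length into the low-order L bytes of B0 big-endian, failing with -EOVERFLOW when the length does not fit in 8*L bits, and format_ccm_a0() uses the RFC 3610 rule visible above: associated-data lengths below 2^16 - 2^8 get a 2-byte encoding, longer ones the 0xff,0xfe marker plus a 4-byte length. A runnable sketch of both encodings:

#include <errno.h>
#include <string.h>

/* Store msglen big-endian in the last csize bytes of a CCM block. */
static int ccm_set_msg_len(unsigned char *block, unsigned int msglen,
                           unsigned int csize)
{
        unsigned int i;

        if (csize < 4 && msglen >= (1U << (8 * csize)))
                return -EOVERFLOW;            /* does not fit in L bytes */

        for (i = 0; i < csize; i++) {
                block[csize - 1 - i] = (unsigned char)(msglen & 0xff);
                msglen >>= 8;
        }
        return 0;
}

/* Encode the associated-data length at the head of the A0 block
 * (RFC 3610 section 2.2); returns the header size used.
 */
static unsigned int ccm_format_a0(unsigned char *a0, unsigned int alen)
{
        if (alen < (1U << 16) - (1U << 8)) {  /* short form: 2 bytes */
                a0[0] = (unsigned char)(alen >> 8);
                a0[1] = (unsigned char)(alen & 0xff);
                return 2;
        }
        a0[0] = 0xff;                         /* long form marker */
        a0[1] = 0xfe;
        a0[2] = (unsigned char)(alen >> 24);
        a0[3] = (unsigned char)(alen >> 16);
        a0[4] = (unsigned char)(alen >> 8);
        a0[5] = (unsigned char)(alen & 0xff);
        return 6;
}
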
1419 if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) { in cc_ccm()
1421 mac_result = req_ctx->mac_buf_dma_addr; in cc_ccm()
1424 mac_result = req_ctx->icv_dma_addr; in cc_ccm()
1430 set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr, in cc_ccm()
1431 ((ctx->enc_keylen == 24) ? CC_AES_KEY_SIZE_MAX : in cc_ccm()
1432 ctx->enc_keylen), NS_BIT); in cc_ccm()
1433 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_ccm()
1442 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_ccm()
1444 req_ctx->gen_ctx.iv_dma_addr, AES_BLOCK_SIZE, NS_BIT); in cc_ccm()
1453 set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr, in cc_ccm()
1454 ((ctx->enc_keylen == 24) ? CC_AES_KEY_SIZE_MAX : in cc_ccm()
1455 ctx->enc_keylen), NS_BIT); in cc_ccm()
1456 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_ccm()
1466 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_ccm()
1467 set_din_type(&desc[idx], DMA_DLLI, req_ctx->mac_buf_dma_addr, in cc_ccm()
1475 /* process assoc data */ in cc_ccm()
1476 if (req_ctx->assoclen > 0) { in cc_ccm()
1481 sg_dma_address(&req_ctx->ccm_adata_sg), in cc_ccm()
1482 AES_BLOCK_SIZE + req_ctx->ccm_hdr_size, NS_BIT); in cc_ccm()
1488 if (req_ctx->cryptlen) in cc_ccm()
1494 set_dout_dlli(&desc[idx], req_ctx->mac_buf_dma_addr, ctx->authsize, in cc_ccm()
1502 /* load AES-CTR state (for last MAC calculation)*/ in cc_ccm()
1506 set_din_type(&desc[idx], DMA_DLLI, req_ctx->ccm_iv0_dma_addr, in cc_ccm()
1508 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_ccm()
1520 set_din_type(&desc[idx], DMA_DLLI, req_ctx->mac_buf_dma_addr, in cc_ccm()
1521 ctx->authsize, NS_BIT); in cc_ccm()
1522 set_dout_dlli(&desc[idx], mac_result, ctx->authsize, NS_BIT, 1); in cc_ccm()
1523 set_queue_last_ind(ctx->drvdata, &desc[idx]); in cc_ccm()
1535 struct device *dev = drvdata_to_dev(ctx->drvdata); in config_ccm_adata()
1538 unsigned int lp = req->iv[0]; in config_ccm_adata()
1539 /* Note: The code assumes that req->iv[0] already contains the value in config_ccm_adata()
1543 unsigned int m = ctx->authsize; /* This is M' of RFC 3610. */ in config_ccm_adata()
1544 u8 *b0 = req_ctx->ccm_config + CCM_B0_OFFSET; in config_ccm_adata()
1545 u8 *a0 = req_ctx->ccm_config + CCM_A0_OFFSET; in config_ccm_adata()
1546 u8 *ctr_count_0 = req_ctx->ccm_config + CCM_CTR_COUNT_0_OFFSET; in config_ccm_adata()
1547 unsigned int cryptlen = (req_ctx->gen_ctx.op_type == in config_ccm_adata()
1549 req->cryptlen : in config_ccm_adata()
1550 (req->cryptlen - ctx->authsize); in config_ccm_adata()
1553 memset(req_ctx->mac_buf, 0, AES_BLOCK_SIZE); in config_ccm_adata()
1554 memset(req_ctx->ccm_config, 0, AES_BLOCK_SIZE * 3); in config_ccm_adata()
1559 dev_dbg(dev, "illegal iv value %X\n", req->iv[0]); in config_ccm_adata()
1560 return -EINVAL; in config_ccm_adata()
1562 memcpy(b0, req->iv, AES_BLOCK_SIZE); in config_ccm_adata()
1565 * NIST Special Publication 800-38C in config_ccm_adata()
1567 *b0 |= (8 * ((m - 2) / 2)); in config_ccm_adata()
1568 if (req_ctx->assoclen > 0) in config_ccm_adata()
1571 rc = set_msg_len(b0 + 16 - l, cryptlen, l); /* Write L'. */ in config_ccm_adata()
1578 /* l(a) - size of associated data. */ in config_ccm_adata()
1579 req_ctx->ccm_hdr_size = format_ccm_a0(a0, req_ctx->assoclen); in config_ccm_adata()
1581 memset(req->iv + 15 - req->iv[0], 0, req->iv[0] + 1); in config_ccm_adata()
1582 req->iv[15] = 1; in config_ccm_adata()
1584 memcpy(ctr_count_0, req->iv, AES_BLOCK_SIZE); in config_ccm_adata()
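
The B0 block built in config_ccm_adata() follows RFC 3610 / NIST SP 800-38C: the flags byte packs the Adata bit (0x40, set when associated data is present), the MAC-length code (M-2)/2 in bits 3..5, and L-1 in bits 0..2; since req->iv[0] is assumed to already hold L-1 (see the note above), only the M and Adata bits are ORed in. A sketch of the full flags computation:

/* Sketch: compute the CCM B0 flags byte per RFC 3610 section 2.2.
 * m = MAC length in bytes (4..16, even), l = length-field size in bytes.
 */
static unsigned char ccm_b0_flags(int have_adata, unsigned int m,
                                  unsigned int l)
{
        return (unsigned char)((have_adata ? 0x40 : 0) |  /* Adata bit      */
                               (((m - 2) / 2) << 3)     |  /* M' in bits 3-5 */
                               (l - 1));                   /* L' in bits 0-2 */
}
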
1597 memset(areq_ctx->ctr_iv, 0, AES_BLOCK_SIZE); in cc_proc_rfc4309_ccm()
1599 * (at most 2^32-1 bytes). in cc_proc_rfc4309_ccm()
1601 areq_ctx->ctr_iv[0] = 3; in cc_proc_rfc4309_ccm()
1603 /* In RFC 4309 there is an 11-byte nonce+IV part, in cc_proc_rfc4309_ccm()
1606 memcpy(areq_ctx->ctr_iv + CCM_BLOCK_NONCE_OFFSET, ctx->ctr_nonce, in cc_proc_rfc4309_ccm()
1608 memcpy(areq_ctx->ctr_iv + CCM_BLOCK_IV_OFFSET, req->iv, in cc_proc_rfc4309_ccm()
1610 req->iv = areq_ctx->ctr_iv; in cc_proc_rfc4309_ccm()
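
The resulting 16-byte RFC 4309 counter block is assembled from fixed fields: byte 0 carries the flag value 3 (L' = L - 1, i.e. a 4-byte length field, hence the "at most 2^32-1 bytes" note), bytes 1..3 the salt saved at setkey time, and bytes 4..11 the 8-byte explicit IV from the packet; the last 4 bytes are left for the block counter. A runnable sketch (offsets mirror the CCM_BLOCK_* constants used above):

#include <string.h>

/* Sketch: build the RFC 4309 CCM counter block. The trailing 4 bytes
 * are the block counter and are filled in by the CTR machinery later.
 */
static void rfc4309_build_iv(unsigned char block[16],
                             const unsigned char salt[3],
                             const unsigned char explicit_iv[8])
{
        memset(block, 0, 16);
        block[0] = 3;                      /* flags: L' = L - 1 = 3 */
        memcpy(block + 1, salt, 3);        /* CCM_BLOCK_NONCE_OFFSET */
        memcpy(block + 4, explicit_iv, 8); /* CCM_BLOCK_IV_OFFSET */
}
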
1625 set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr, in cc_set_ghash_desc()
1626 ctx->enc_keylen, NS_BIT); in cc_set_ghash_desc()
1627 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_set_ghash_desc()
1635 set_dout_dlli(&desc[idx], req_ctx->hkey_dma_addr, AES_BLOCK_SIZE, in cc_set_ghash_desc()
1648 set_din_type(&desc[idx], DMA_DLLI, req_ctx->hkey_dma_addr, in cc_set_ghash_desc()
1661 * select GHASH (according to HW designers) in cc_set_ghash_desc()
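
The cc_set_ghash_desc() fragments perform the standard GCM subkey derivation of NIST SP 800-38D: the hash key H is the AES encryption of the all-zero block under the cipher key, computed by the engine into hkey_dma_addr before the GHASH state is zeroed. In software, assuming the same hypothetical aes_encrypt_block() as in the XCBC sketch:

#include <stddef.h>
#include <string.h>

/* Hypothetical one-block AES primitive, as in the XCBC sketch. */
void aes_encrypt_block(const unsigned char *key, size_t keylen,
                       const unsigned char in[16], unsigned char out[16]);

/* Sketch: derive the GHASH subkey H = E_K(0^128) per SP 800-38D. */
static void gcm_derive_h(const unsigned char *key, size_t keylen,
                         unsigned char h[16])
{
        unsigned char zeroes[16];

        memset(zeroes, 0, sizeof(zeroes));
        aes_encrypt_block(key, keylen, zeroes, h);
}
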
1703 set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr, in cc_set_gctr_desc()
1704 ctx->enc_keylen, NS_BIT); in cc_set_gctr_desc()
1705 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_set_gctr_desc()
1710 if (req_ctx->cryptlen && !req_ctx->plaintext_authenticate_only) { in cc_set_gctr_desc()
1714 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_set_gctr_desc()
1716 req_ctx->gcm_iv_inc2_dma_addr, AES_BLOCK_SIZE, in cc_set_gctr_desc()
1737 if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) { in cc_proc_gcm_result()
1738 mac_result = req_ctx->mac_buf_dma_addr; in cc_proc_gcm_result()
1740 mac_result = req_ctx->icv_dma_addr; in cc_proc_gcm_result()
1745 set_din_type(&desc[idx], DMA_DLLI, req_ctx->gcm_block_len_dma_addr, in cc_proc_gcm_result()
1754 set_dout_dlli(&desc[idx], req_ctx->mac_buf_dma_addr, AES_BLOCK_SIZE, in cc_proc_gcm_result()
1765 set_key_size_aes(&desc[idx], ctx->enc_keylen); in cc_proc_gcm_result()
1766 set_din_type(&desc[idx], DMA_DLLI, req_ctx->gcm_iv_inc1_dma_addr, in cc_proc_gcm_result()
1782 set_din_type(&desc[idx], DMA_DLLI, req_ctx->mac_buf_dma_addr, in cc_proc_gcm_result()
1784 set_dout_dlli(&desc[idx], mac_result, ctx->authsize, NS_BIT, 1); in cc_proc_gcm_result()
1785 set_queue_last_ind(ctx->drvdata, &desc[idx]); in cc_proc_gcm_result()
1799 if (req_ctx->plaintext_authenticate_only) { in cc_gcm()
1802 /* process(ghash) assoc data */ in cc_gcm()
1809 if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) { in cc_gcm()
1817 /* process(ghash) assoc data */ in cc_gcm()
1818 if (req_ctx->assoclen > 0) in cc_gcm()
1822 if (req_ctx->cryptlen) in cc_gcm()
1834 struct device *dev = drvdata_to_dev(ctx->drvdata); in config_gcm_context()
1836 unsigned int cryptlen = (req_ctx->gen_ctx.op_type == in config_gcm_context()
1838 req->cryptlen : in config_gcm_context()
1839 (req->cryptlen - ctx->authsize); in config_gcm_context()
1842 dev_dbg(dev, "%s() cryptlen = %d, req_ctx->assoclen = %d ctx->authsize = %d\n", in config_gcm_context()
1843 __func__, cryptlen, req_ctx->assoclen, ctx->authsize); in config_gcm_context()
1845 memset(req_ctx->hkey, 0, AES_BLOCK_SIZE); in config_gcm_context()
1847 memset(req_ctx->mac_buf, 0, AES_BLOCK_SIZE); in config_gcm_context()
1849 memcpy(req->iv + 12, &counter, 4); in config_gcm_context()
1850 memcpy(req_ctx->gcm_iv_inc2, req->iv, 16); in config_gcm_context()
1853 memcpy(req->iv + 12, &counter, 4); in config_gcm_context()
1854 memcpy(req_ctx->gcm_iv_inc1, req->iv, 16); in config_gcm_context()
1856 if (!req_ctx->plaintext_authenticate_only) { in config_gcm_context()
1859 temp64 = cpu_to_be64(req_ctx->assoclen * 8); in config_gcm_context()
1860 memcpy(&req_ctx->gcm_len_block.len_a, &temp64, sizeof(temp64)); in config_gcm_context()
1862 memcpy(&req_ctx->gcm_len_block.len_c, &temp64, 8); in config_gcm_context()
1869 temp64 = cpu_to_be64((req_ctx->assoclen + cryptlen) * 8); in config_gcm_context()
1870 memcpy(&req_ctx->gcm_len_block.len_a, &temp64, sizeof(temp64)); in config_gcm_context()
1872 memcpy(&req_ctx->gcm_len_block.len_c, &temp64, 8); in config_gcm_context()
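
config_gcm_context() stamps big-endian 2 and then 1 into the last four IV bytes, producing the standard 96-bit-IV counter blocks: counter 1 is J0 (consumed by the final GCTR pass over the tag) and counter 2 is the first data-block counter. It also builds the GHASH length block: len(A) || len(C) as 64-bit big-endian bit counts for normal GCM, while for RFC 4543 (GMAC, plaintext_authenticate_only) everything is authenticated, so assoclen + cryptlen fills the first half and the second half stays zero. A runnable sketch of the length block:

#include <stdint.h>

/* Sketch: build the 16-byte GHASH length block (SP 800-38D):
 * 64-bit big-endian bit lengths of A and C.
 */
static void gcm_len_block(unsigned char block[16],
                          uint64_t assoclen, uint64_t cryptlen)
{
        uint64_t bits;
        int i;

        bits = assoclen * 8;                  /* len(A) in bits */
        for (i = 0; i < 8; i++)
                block[i] = (unsigned char)(bits >> (56 - 8 * i));

        bits = cryptlen * 8;                  /* len(C) in bits */
        for (i = 0; i < 8; i++)
                block[8 + i] = (unsigned char)(bits >> (56 - 8 * i));
}
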
1884 memcpy(areq_ctx->ctr_iv + GCM_BLOCK_RFC4_NONCE_OFFSET, in cc_proc_rfc4_gcm()
1885 ctx->ctr_nonce, GCM_BLOCK_RFC4_NONCE_SIZE); in cc_proc_rfc4_gcm()
1886 memcpy(areq_ctx->ctr_iv + GCM_BLOCK_RFC4_IV_OFFSET, req->iv, in cc_proc_rfc4_gcm()
1888 req->iv = areq_ctx->ctr_iv; in cc_proc_rfc4_gcm()
1900 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_proc_aead()
1905 ctx, req, req->iv, sg_virt(req->src), req->src->offset, in cc_proc_aead()
1906 sg_virt(req->dst), req->dst->offset, req->cryptlen); in cc_proc_aead()
1912 dev_err(dev, "Unsupported crypt/assoc len %d/%d.\n", in cc_proc_aead()
1913 req->cryptlen, areq_ctx->assoclen); in cc_proc_aead()
1914 return -EINVAL; in cc_proc_aead()
1922 areq_ctx->gen_ctx.op_type = direct; in cc_proc_aead()
1923 areq_ctx->req_authsize = ctx->authsize; in cc_proc_aead()
1924 areq_ctx->cipher_mode = ctx->cipher_mode; in cc_proc_aead()
1928 if (ctx->cipher_mode == DRV_CIPHER_CTR) { in cc_proc_aead()
1929 /* Build CTR IV - Copy nonce from last 4 bytes in in cc_proc_aead()
1932 memcpy(areq_ctx->ctr_iv, ctx->ctr_nonce, in cc_proc_aead()
1934 memcpy(areq_ctx->ctr_iv + CTR_RFC3686_NONCE_SIZE, req->iv, in cc_proc_aead()
1937 *(__be32 *)(areq_ctx->ctr_iv + CTR_RFC3686_NONCE_SIZE + in cc_proc_aead()
1941 req->iv = areq_ctx->ctr_iv; in cc_proc_aead()
1942 areq_ctx->hw_iv_size = CTR_RFC3686_BLOCK_SIZE; in cc_proc_aead()
1943 } else if ((ctx->cipher_mode == DRV_CIPHER_CCM) || in cc_proc_aead()
1944 (ctx->cipher_mode == DRV_CIPHER_GCTR)) { in cc_proc_aead()
1945 areq_ctx->hw_iv_size = AES_BLOCK_SIZE; in cc_proc_aead()
1946 if (areq_ctx->ctr_iv != req->iv) { in cc_proc_aead()
1947 memcpy(areq_ctx->ctr_iv, req->iv, in cc_proc_aead()
1949 req->iv = areq_ctx->ctr_iv; in cc_proc_aead()
1952 areq_ctx->hw_iv_size = crypto_aead_ivsize(tfm); in cc_proc_aead()
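
For CTR-mode authenc the branch above rebuilds the full 16-byte counter block per RFC 3686: the 4-byte nonce saved at setkey time, the 8-byte per-request IV, and a big-endian block counter initialized to 1. A runnable sketch:

#include <string.h>

/* Sketch: assemble the RFC 3686 counter block
 * nonce (4) || IV (8) || counter (4, big-endian, starts at 1).
 */
static void rfc3686_build_iv(unsigned char block[16],
                             const unsigned char nonce[4],
                             const unsigned char iv[8])
{
        memcpy(block, nonce, 4);
        memcpy(block + 4, iv, 8);
        block[12] = 0;                        /* counter = 1, big-endian */
        block[13] = 0;
        block[14] = 0;
        block[15] = 1;
}
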
1955 if (ctx->cipher_mode == DRV_CIPHER_CCM) { in cc_proc_aead()
1963 areq_ctx->ccm_hdr_size = ccm_header_size_null; in cc_proc_aead()
1966 if (ctx->cipher_mode == DRV_CIPHER_GCTR) { in cc_proc_aead()
1975 rc = cc_map_aead_request(ctx->drvdata, req); in cc_proc_aead()
1986 switch (ctx->auth_mode) { in cc_proc_aead()
1995 if (ctx->cipher_mode == DRV_CIPHER_CCM) in cc_proc_aead()
1997 if (ctx->cipher_mode == DRV_CIPHER_GCTR) in cc_proc_aead()
2001 dev_err(dev, "Unsupported authenc (%d)\n", ctx->auth_mode); in cc_proc_aead()
2003 rc = -ENOTSUPP; in cc_proc_aead()
2009 rc = cc_send_request(ctx->drvdata, &cc_req, desc, seq_len, &req->base); in cc_proc_aead()
2011 if (rc != -EINPROGRESS && rc != -EBUSY) { in cc_proc_aead()
2028 areq_ctx->backup_iv = req->iv; in cc_aead_encrypt()
2029 areq_ctx->assoclen = req->assoclen; in cc_aead_encrypt()
2032 if (rc != -EINPROGRESS && rc != -EBUSY) in cc_aead_encrypt()
2033 req->iv = areq_ctx->backup_iv; in cc_aead_encrypt()
2045 rc = crypto_ipsec_check_assoclen(req->assoclen); in cc_rfc4309_ccm_encrypt()
2052 areq_ctx->backup_iv = req->iv; in cc_rfc4309_ccm_encrypt()
2053 areq_ctx->assoclen = req->assoclen - CCM_BLOCK_IV_SIZE; in cc_rfc4309_ccm_encrypt()
2058 if (rc != -EINPROGRESS && rc != -EBUSY) in cc_rfc4309_ccm_encrypt()
2059 req->iv = areq_ctx->backup_iv; in cc_rfc4309_ccm_encrypt()
2072 areq_ctx->backup_iv = req->iv; in cc_aead_decrypt()
2073 areq_ctx->assoclen = req->assoclen; in cc_aead_decrypt()
2076 if (rc != -EINPROGRESS && rc != -EBUSY) in cc_aead_decrypt()
2077 req->iv = areq_ctx->backup_iv; in cc_aead_decrypt()
2087 rc = crypto_ipsec_check_assoclen(req->assoclen); in cc_rfc4309_ccm_decrypt()
2094 areq_ctx->backup_iv = req->iv; in cc_rfc4309_ccm_decrypt()
2095 areq_ctx->assoclen = req->assoclen - CCM_BLOCK_IV_SIZE; in cc_rfc4309_ccm_decrypt()
2100 if (rc != -EINPROGRESS && rc != -EBUSY) in cc_rfc4309_ccm_decrypt()
2101 req->iv = areq_ctx->backup_iv; in cc_rfc4309_ccm_decrypt()
2111 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_rfc4106_gcm_setkey()
2116 return -EINVAL; in cc_rfc4106_gcm_setkey()
2118 keylen -= 4; in cc_rfc4106_gcm_setkey()
2119 memcpy(ctx->ctr_nonce, key + keylen, 4); in cc_rfc4106_gcm_setkey()
2128 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_rfc4543_gcm_setkey()
2133 return -EINVAL; in cc_rfc4543_gcm_setkey()
2135 keylen -= 4; in cc_rfc4543_gcm_setkey()
2136 memcpy(ctx->ctr_nonce, key + keylen, 4); in cc_rfc4543_gcm_setkey()
2154 return -EINVAL; in cc_gcm_setauthsize()
2164 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_rfc4106_gcm_setauthsize()
2174 return -EINVAL; in cc_rfc4106_gcm_setauthsize()
2184 struct device *dev = drvdata_to_dev(ctx->drvdata); in cc_rfc4543_gcm_setauthsize()
2189 return -EINVAL; in cc_rfc4543_gcm_setauthsize()
2199 rc = crypto_ipsec_check_assoclen(req->assoclen); in cc_rfc4106_gcm_encrypt()
2206 areq_ctx->backup_iv = req->iv; in cc_rfc4106_gcm_encrypt()
2207 areq_ctx->assoclen = req->assoclen - GCM_BLOCK_RFC4_IV_SIZE; in cc_rfc4106_gcm_encrypt()
2212 if (rc != -EINPROGRESS && rc != -EBUSY) in cc_rfc4106_gcm_encrypt()
2213 req->iv = areq_ctx->backup_iv; in cc_rfc4106_gcm_encrypt()
2223 rc = crypto_ipsec_check_assoclen(req->assoclen); in cc_rfc4543_gcm_encrypt()
2230 areq_ctx->plaintext_authenticate_only = true; in cc_rfc4543_gcm_encrypt()
2233 areq_ctx->backup_iv = req->iv; in cc_rfc4543_gcm_encrypt()
2234 areq_ctx->assoclen = req->assoclen; in cc_rfc4543_gcm_encrypt()
2239 if (rc != -EINPROGRESS && rc != -EBUSY) in cc_rfc4543_gcm_encrypt()
2240 req->iv = areq_ctx->backup_iv; in cc_rfc4543_gcm_encrypt()
2250 rc = crypto_ipsec_check_assoclen(req->assoclen); in cc_rfc4106_gcm_decrypt()
2257 areq_ctx->backup_iv = req->iv; in cc_rfc4106_gcm_decrypt()
2258 areq_ctx->assoclen = req->assoclen - GCM_BLOCK_RFC4_IV_SIZE; in cc_rfc4106_gcm_decrypt()
2263 if (rc != -EINPROGRESS && rc != -EBUSY) in cc_rfc4106_gcm_decrypt()
2264 req->iv = areq_ctx->backup_iv; in cc_rfc4106_gcm_decrypt()
2274 rc = crypto_ipsec_check_assoclen(req->assoclen); in cc_rfc4543_gcm_decrypt()
2281 areq_ctx->plaintext_authenticate_only = true; in cc_rfc4543_gcm_decrypt()
2284 areq_ctx->backup_iv = req->iv; in cc_rfc4543_gcm_decrypt()
2285 areq_ctx->assoclen = req->assoclen; in cc_rfc4543_gcm_decrypt()
2290 if (rc != -EINPROGRESS && rc != -EBUSY) in cc_rfc4543_gcm_decrypt()
2291 req->iv = areq_ctx->backup_iv; in cc_rfc4543_gcm_decrypt()
2300 .driver_name = "authenc-hmac-sha1-cbc-aes-ccree",
2320 .driver_name = "authenc-hmac-sha1-cbc-des3-ccree",
2340 .driver_name = "authenc-hmac-sha256-cbc-aes-ccree",
2360 .driver_name = "authenc-hmac-sha256-cbc-des3-ccree",
2380 .driver_name = "authenc-xcbc-aes-cbc-aes-ccree",
2400 .driver_name = "authenc-hmac-sha1-rfc3686-ctr-aes-ccree",
2420 .driver_name = "authenc-hmac-sha256-rfc3686-ctr-aes-ccree",
2440 .driver_name = "authenc-xcbc-aes-rfc3686-ctr-aes-ccree",
2460 .driver_name = "ccm-aes-ccree",
2480 .driver_name = "rfc4309-ccm-aes-ccree",
2500 .driver_name = "gcm-aes-ccree",
2520 .driver_name = "rfc4106-gcm-aes-ccree",
2540 .driver_name = "rfc4543-gcm-aes-ccree",
2568 return ERR_PTR(-ENOMEM); in cc_create_aead_alg()
2570 alg = &tmpl->template_aead; in cc_create_aead_alg()
2572 snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name); in cc_create_aead_alg()
2573 snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", in cc_create_aead_alg()
2574 tmpl->driver_name); in cc_create_aead_alg()
2575 alg->base.cra_module = THIS_MODULE; in cc_create_aead_alg()
2576 alg->base.cra_priority = CC_CRA_PRIO; in cc_create_aead_alg()
2578 alg->base.cra_ctxsize = sizeof(struct cc_aead_ctx); in cc_create_aead_alg()
2579 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; in cc_create_aead_alg()
2580 alg->base.cra_blocksize = tmpl->blocksize; in cc_create_aead_alg()
2581 alg->init = cc_aead_init; in cc_create_aead_alg()
2582 alg->exit = cc_aead_exit; in cc_create_aead_alg()
2584 t_alg->aead_alg = *alg; in cc_create_aead_alg()
2586 t_alg->cipher_mode = tmpl->cipher_mode; in cc_create_aead_alg()
2587 t_alg->flow_mode = tmpl->flow_mode; in cc_create_aead_alg()
2588 t_alg->auth_mode = tmpl->auth_mode; in cc_create_aead_alg()
2596 struct cc_aead_handle *aead_handle = drvdata->aead_handle; in cc_aead_free()
2599 list_for_each_entry_safe(t_alg, n, &aead_handle->aead_list, entry) { in cc_aead_free()
2600 crypto_unregister_aead(&t_alg->aead_alg); in cc_aead_free()
2601 list_del(&t_alg->entry); in cc_aead_free()
2611 int rc = -ENOMEM; in cc_aead_alloc()
2617 rc = -ENOMEM; in cc_aead_alloc()
2621 INIT_LIST_HEAD(&aead_handle->aead_list); in cc_aead_alloc()
2622 drvdata->aead_handle = aead_handle; in cc_aead_alloc()
2624 aead_handle->sram_workspace_addr = cc_sram_alloc(drvdata, in cc_aead_alloc()
2627 if (aead_handle->sram_workspace_addr == NULL_SRAM_ADDR) { in cc_aead_alloc()
2628 rc = -ENOMEM; in cc_aead_alloc()
2634 if ((aead_algs[alg].min_hw_rev > drvdata->hw_rev) || in cc_aead_alloc()
2635 !(drvdata->std_bodies & aead_algs[alg].std_body)) in cc_aead_alloc()
2645 t_alg->drvdata = drvdata; in cc_aead_alloc()
2646 rc = crypto_register_aead(&t_alg->aead_alg); in cc_aead_alloc()
2649 t_alg->aead_alg.base.cra_driver_name); in cc_aead_alloc()
2653 list_add_tail(&t_alg->entry, &aead_handle->aead_list); in cc_aead_alloc()
2655 t_alg->aead_alg.base.cra_driver_name); in cc_aead_alloc()