Lines matching refs: areq_ctx

66 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_copy_mac() local
70 if (areq_ctx->is_gcm4543) in cc_copy_mac()
73 cc_copy_sg_portion(dev, areq_ctx->backup_mac, req->src, in cc_copy_mac()
74 (skip - areq_ctx->req_authsize), skip, dir); in cc_copy_mac()
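
cc_copy_mac() saves (or restores) the authentication tag that sits at the tail of the source scatterlist before the hardware overwrites it. The driver routes this through its own cc_copy_sg_portion() helper; below is a minimal stand-alone sketch of the same idea using the generic scatterlist copy helpers instead (the function name, nents handling, and save/restore flag are illustrative, not the driver's).

#include <linux/scatterlist.h>
#include <linux/types.h>

/*
 * Sketch: copy 'authsize' tag bytes starting at byte offset 'tag_offset'
 * out of, or back into, a scatterlist -- the role backup_mac plays in
 * cc_copy_mac(). Uses the stock sg_pcopy_*() helpers rather than the
 * driver's cc_copy_sg_portion().
 */
static void sketch_copy_mac(struct scatterlist *sg, unsigned int nents,
			    u8 *backup_mac, unsigned int tag_offset,
			    unsigned int authsize, bool save)
{
	if (save)	/* save the tag before it is overwritten */
		sg_pcopy_to_buffer(sg, nents, backup_mac, authsize, tag_offset);
	else		/* restore the tag after processing */
		sg_pcopy_from_buffer(sg, nents, backup_mac, authsize, tag_offset);
}
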
397 cc_set_aead_conf_buf(struct device *dev, struct aead_req_ctx *areq_ctx, in cc_set_aead_conf_buf() argument
403 sg_init_one(&areq_ctx->ccm_adata_sg, config_data, in cc_set_aead_conf_buf()
404 AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size); in cc_set_aead_conf_buf()
405 if (dma_map_sg(dev, &areq_ctx->ccm_adata_sg, 1, DMA_TO_DEVICE) != 1) { in cc_set_aead_conf_buf()
410 &sg_dma_address(&areq_ctx->ccm_adata_sg), in cc_set_aead_conf_buf()
411 sg_page(&areq_ctx->ccm_adata_sg), in cc_set_aead_conf_buf()
412 sg_virt(&areq_ctx->ccm_adata_sg), in cc_set_aead_conf_buf()
413 areq_ctx->ccm_adata_sg.offset, areq_ctx->ccm_adata_sg.length); in cc_set_aead_conf_buf()
416 cc_add_sg_entry(dev, sg_data, 1, &areq_ctx->ccm_adata_sg, in cc_set_aead_conf_buf()
417 (AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size), in cc_set_aead_conf_buf()
423 static int cc_set_hash_buf(struct device *dev, struct ahash_req_ctx *areq_ctx, in cc_set_hash_buf() argument
429 sg_init_one(areq_ctx->buff_sg, curr_buff, curr_buff_cnt); in cc_set_hash_buf()
430 if (dma_map_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE) != 1) { in cc_set_hash_buf()
435 &sg_dma_address(areq_ctx->buff_sg), sg_page(areq_ctx->buff_sg), in cc_set_hash_buf()
436 sg_virt(areq_ctx->buff_sg), areq_ctx->buff_sg->offset, in cc_set_hash_buf()
437 areq_ctx->buff_sg->length); in cc_set_hash_buf()
438 areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI; in cc_set_hash_buf()
439 areq_ctx->curr_sg = areq_ctx->buff_sg; in cc_set_hash_buf()
440 areq_ctx->in_nents = 0; in cc_set_hash_buf()
442 cc_add_sg_entry(dev, sg_data, 1, areq_ctx->buff_sg, curr_buff_cnt, 0, in cc_set_hash_buf()
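
cc_set_aead_conf_buf() and cc_set_hash_buf() share one pattern: wrap a driver-owned linear buffer in a one-entry scatterlist with sg_init_one(), DMA-map it, and record it as a direct (DLLI) buffer. A minimal sketch of that pattern, assuming an arbitrary buffer and length (the helper name is illustrative):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch: map a linear config/staging buffer as a single-entry SG list. */
static int sketch_map_linear_buf(struct device *dev, struct scatterlist *sg,
				 void *buf, unsigned int len)
{
	sg_init_one(sg, buf, len);
	if (dma_map_sg(dev, sg, 1, DMA_TO_DEVICE) != 1) {
		dev_err(dev, "dma_map_sg() of %u B buffer failed\n", len);
		return -ENOMEM;
	}
	dev_dbg(dev, "mapped buffer: dma=%pad virt=%p len=%u\n",
		&sg_dma_address(sg), sg_virt(sg), sg->length);
	return 0;
}
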
569 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_unmap_aead_request() local
570 unsigned int hw_iv_size = areq_ctx->hw_iv_size; in cc_unmap_aead_request()
577 if (areq_ctx->mac_buf_dma_addr) { in cc_unmap_aead_request()
578 dma_unmap_single(dev, areq_ctx->mac_buf_dma_addr, in cc_unmap_aead_request()
582 if (areq_ctx->cipher_mode == DRV_CIPHER_GCTR) { in cc_unmap_aead_request()
583 if (areq_ctx->hkey_dma_addr) { in cc_unmap_aead_request()
584 dma_unmap_single(dev, areq_ctx->hkey_dma_addr, in cc_unmap_aead_request()
588 if (areq_ctx->gcm_block_len_dma_addr) { in cc_unmap_aead_request()
589 dma_unmap_single(dev, areq_ctx->gcm_block_len_dma_addr, in cc_unmap_aead_request()
593 if (areq_ctx->gcm_iv_inc1_dma_addr) { in cc_unmap_aead_request()
594 dma_unmap_single(dev, areq_ctx->gcm_iv_inc1_dma_addr, in cc_unmap_aead_request()
598 if (areq_ctx->gcm_iv_inc2_dma_addr) { in cc_unmap_aead_request()
599 dma_unmap_single(dev, areq_ctx->gcm_iv_inc2_dma_addr, in cc_unmap_aead_request()
604 if (areq_ctx->ccm_hdr_size != ccm_header_size_null) { in cc_unmap_aead_request()
605 if (areq_ctx->ccm_iv0_dma_addr) { in cc_unmap_aead_request()
606 dma_unmap_single(dev, areq_ctx->ccm_iv0_dma_addr, in cc_unmap_aead_request()
610 dma_unmap_sg(dev, &areq_ctx->ccm_adata_sg, 1, DMA_TO_DEVICE); in cc_unmap_aead_request()
612 if (areq_ctx->gen_ctx.iv_dma_addr) { in cc_unmap_aead_request()
613 dma_unmap_single(dev, areq_ctx->gen_ctx.iv_dma_addr, in cc_unmap_aead_request()
620 if (areq_ctx->mlli_params.curr_pool) { in cc_unmap_aead_request()
622 &areq_ctx->mlli_params.mlli_dma_addr, in cc_unmap_aead_request()
623 areq_ctx->mlli_params.mlli_virt_addr); in cc_unmap_aead_request()
624 dma_pool_free(areq_ctx->mlli_params.curr_pool, in cc_unmap_aead_request()
625 areq_ctx->mlli_params.mlli_virt_addr, in cc_unmap_aead_request()
626 areq_ctx->mlli_params.mlli_dma_addr); in cc_unmap_aead_request()
630 sg_virt(req->src), areq_ctx->src.nents, areq_ctx->assoc.nents, in cc_unmap_aead_request()
633 if (areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_ENCRYPT) in cc_unmap_aead_request()
634 size_to_unmap += areq_ctx->req_authsize; in cc_unmap_aead_request()
635 if (areq_ctx->is_gcm4543) in cc_unmap_aead_request()
651 areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT && in cc_unmap_aead_request()
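
The teardown in cc_unmap_aead_request() is strictly conditional: each dma_unmap_single() is guarded by a test that the corresponding handle was actually recorded by the map path, so a partially mapped request can be unwound safely. A reduced sketch of that guard, assuming a zero handle means "never mapped" (the convention the listing's error paths arrange):

#include <linux/dma-mapping.h>

/* Sketch: unmap a per-request buffer only if the map step recorded a handle. */
static void sketch_unmap_if_mapped(struct device *dev, dma_addr_t *dma_addr,
				   size_t size, enum dma_data_direction dir)
{
	if (*dma_addr) {
		dma_unmap_single(dev, *dma_addr, size, dir);
		*dma_addr = 0;	/* make the teardown idempotent */
	}
}
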
717 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_aead_chain_iv() local
718 unsigned int hw_iv_size = areq_ctx->hw_iv_size; in cc_aead_chain_iv()
723 areq_ctx->gen_ctx.iv_dma_addr = 0; in cc_aead_chain_iv()
727 areq_ctx->gen_ctx.iv_dma_addr = dma_map_single(dev, req->iv, in cc_aead_chain_iv()
730 if (dma_mapping_error(dev, areq_ctx->gen_ctx.iv_dma_addr)) { in cc_aead_chain_iv()
738 hw_iv_size, req->iv, &areq_ctx->gen_ctx.iv_dma_addr); in cc_aead_chain_iv()
740 if (do_chain && areq_ctx->plaintext_authenticate_only) { in cc_aead_chain_iv()
746 (areq_ctx->gen_ctx.iv_dma_addr + iv_ofs), in cc_aead_chain_iv()
748 &areq_ctx->assoc.mlli_nents); in cc_aead_chain_iv()
749 areq_ctx->assoc_buff_type = CC_DMA_BUF_MLLI; in cc_aead_chain_iv()
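
cc_aead_chain_iv() maps the request IV with dma_map_single() and validates the result with dma_mapping_error(); on failure, or when there is no IV, gen_ctx.iv_dma_addr stays 0 so the unmap path above skips it. A minimal sketch of that step; the DMA direction and the helper name are assumptions, only hw_iv_size and the zero-on-failure convention come from the listing:

#include <linux/device.h>
#include <linux/dma-mapping.h>

/* Sketch: map the request IV and check the mapping, cc_aead_chain_iv()-style. */
static int sketch_map_iv(struct device *dev, u8 *iv, unsigned int hw_iv_size,
			 dma_addr_t *iv_dma_addr)
{
	*iv_dma_addr = 0;
	if (!iv)
		return 0;	/* no IV: nothing to map */

	*iv_dma_addr = dma_map_single(dev, iv, hw_iv_size, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, *iv_dma_addr)) {
		dev_err(dev, "Mapping %u B IV failed\n", hw_iv_size);
		*iv_dma_addr = 0;
		return -ENOMEM;
	}
	return 0;
}
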
761 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_aead_chain_assoc() local
770 if (areq_ctx->is_gcm4543) in cc_aead_chain_assoc()
779 areq_ctx->assoc_buff_type = CC_DMA_BUF_NULL; in cc_aead_chain_assoc()
780 areq_ctx->assoc.nents = 0; in cc_aead_chain_assoc()
781 areq_ctx->assoc.mlli_nents = 0; in cc_aead_chain_assoc()
783 cc_dma_buf_type(areq_ctx->assoc_buff_type), in cc_aead_chain_assoc()
784 areq_ctx->assoc.nents); in cc_aead_chain_assoc()
813 areq_ctx->assoc.nents = mapped_nents; in cc_aead_chain_assoc()
818 if (areq_ctx->ccm_hdr_size != ccm_header_size_null) { in cc_aead_chain_assoc()
821 (areq_ctx->assoc.nents + 1), in cc_aead_chain_assoc()
828 if (mapped_nents == 1 && areq_ctx->ccm_hdr_size == ccm_header_size_null) in cc_aead_chain_assoc()
829 areq_ctx->assoc_buff_type = CC_DMA_BUF_DLLI; in cc_aead_chain_assoc()
831 areq_ctx->assoc_buff_type = CC_DMA_BUF_MLLI; in cc_aead_chain_assoc()
833 if (do_chain || areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI) { in cc_aead_chain_assoc()
835 cc_dma_buf_type(areq_ctx->assoc_buff_type), in cc_aead_chain_assoc()
836 areq_ctx->assoc.nents); in cc_aead_chain_assoc()
837 cc_add_sg_entry(dev, sg_data, areq_ctx->assoc.nents, req->src, in cc_aead_chain_assoc()
839 &areq_ctx->assoc.mlli_nents); in cc_aead_chain_assoc()
840 areq_ctx->assoc_buff_type = CC_DMA_BUF_MLLI; in cc_aead_chain_assoc()
850 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_prepare_aead_data_dlli() local
851 enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type; in cc_prepare_aead_data_dlli()
852 unsigned int authsize = areq_ctx->req_authsize; in cc_prepare_aead_data_dlli()
854 areq_ctx->is_icv_fragmented = false; in cc_prepare_aead_data_dlli()
857 areq_ctx->icv_dma_addr = sg_dma_address(areq_ctx->src_sgl) + in cc_prepare_aead_data_dlli()
859 areq_ctx->icv_virt_addr = sg_virt(areq_ctx->src_sgl) + in cc_prepare_aead_data_dlli()
863 areq_ctx->icv_dma_addr = sg_dma_address(areq_ctx->src_sgl) + in cc_prepare_aead_data_dlli()
865 areq_ctx->icv_virt_addr = sg_virt(areq_ctx->src_sgl) + in cc_prepare_aead_data_dlli()
869 areq_ctx->icv_dma_addr = sg_dma_address(areq_ctx->dst_sgl) + in cc_prepare_aead_data_dlli()
871 areq_ctx->icv_virt_addr = sg_virt(areq_ctx->dst_sgl) + in cc_prepare_aead_data_dlli()
882 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_prepare_aead_data_mlli() local
883 enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type; in cc_prepare_aead_data_mlli()
884 unsigned int authsize = areq_ctx->req_authsize; in cc_prepare_aead_data_mlli()
891 cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, in cc_prepare_aead_data_mlli()
892 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
893 areq_ctx->src_offset, is_last_table, in cc_prepare_aead_data_mlli()
894 &areq_ctx->src.mlli_nents); in cc_prepare_aead_data_mlli()
896 icv_nents = cc_get_aead_icv_nents(dev, areq_ctx->src_sgl, in cc_prepare_aead_data_mlli()
897 areq_ctx->src.nents, in cc_prepare_aead_data_mlli()
899 &areq_ctx->is_icv_fragmented); in cc_prepare_aead_data_mlli()
905 if (areq_ctx->is_icv_fragmented) { in cc_prepare_aead_data_mlli()
919 areq_ctx->icv_virt_addr = areq_ctx->backup_mac; in cc_prepare_aead_data_mlli()
921 areq_ctx->icv_virt_addr = areq_ctx->mac_buf; in cc_prepare_aead_data_mlli()
922 areq_ctx->icv_dma_addr = in cc_prepare_aead_data_mlli()
923 areq_ctx->mac_buf_dma_addr; in cc_prepare_aead_data_mlli()
926 sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1]; in cc_prepare_aead_data_mlli()
928 areq_ctx->icv_dma_addr = sg_dma_address(sg) + in cc_prepare_aead_data_mlli()
930 areq_ctx->icv_virt_addr = sg_virt(sg) + in cc_prepare_aead_data_mlli()
936 cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, in cc_prepare_aead_data_mlli()
937 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
938 areq_ctx->src_offset, is_last_table, in cc_prepare_aead_data_mlli()
939 &areq_ctx->src.mlli_nents); in cc_prepare_aead_data_mlli()
940 cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents, in cc_prepare_aead_data_mlli()
941 areq_ctx->dst_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
942 areq_ctx->dst_offset, is_last_table, in cc_prepare_aead_data_mlli()
943 &areq_ctx->dst.mlli_nents); in cc_prepare_aead_data_mlli()
945 icv_nents = cc_get_aead_icv_nents(dev, areq_ctx->src_sgl, in cc_prepare_aead_data_mlli()
946 areq_ctx->src.nents, in cc_prepare_aead_data_mlli()
948 &areq_ctx->is_icv_fragmented); in cc_prepare_aead_data_mlli()
958 if (areq_ctx->is_icv_fragmented) { in cc_prepare_aead_data_mlli()
960 areq_ctx->icv_virt_addr = areq_ctx->backup_mac; in cc_prepare_aead_data_mlli()
963 sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1]; in cc_prepare_aead_data_mlli()
965 areq_ctx->icv_dma_addr = sg_dma_address(sg) + in cc_prepare_aead_data_mlli()
967 areq_ctx->icv_virt_addr = sg_virt(sg) + in cc_prepare_aead_data_mlli()
973 cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents, in cc_prepare_aead_data_mlli()
974 areq_ctx->dst_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
975 areq_ctx->dst_offset, is_last_table, in cc_prepare_aead_data_mlli()
976 &areq_ctx->dst.mlli_nents); in cc_prepare_aead_data_mlli()
977 cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, in cc_prepare_aead_data_mlli()
978 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
979 areq_ctx->src_offset, is_last_table, in cc_prepare_aead_data_mlli()
980 &areq_ctx->src.mlli_nents); in cc_prepare_aead_data_mlli()
982 icv_nents = cc_get_aead_icv_nents(dev, areq_ctx->dst_sgl, in cc_prepare_aead_data_mlli()
983 areq_ctx->dst.nents, in cc_prepare_aead_data_mlli()
985 &areq_ctx->is_icv_fragmented); in cc_prepare_aead_data_mlli()
991 if (!areq_ctx->is_icv_fragmented) { in cc_prepare_aead_data_mlli()
992 sg = &areq_ctx->dst_sgl[areq_ctx->dst.nents - 1]; in cc_prepare_aead_data_mlli()
994 areq_ctx->icv_dma_addr = sg_dma_address(sg) + in cc_prepare_aead_data_mlli()
996 areq_ctx->icv_virt_addr = sg_virt(sg) + in cc_prepare_aead_data_mlli()
999 areq_ctx->icv_dma_addr = areq_ctx->mac_buf_dma_addr; in cc_prepare_aead_data_mlli()
1000 areq_ctx->icv_virt_addr = areq_ctx->mac_buf; in cc_prepare_aead_data_mlli()
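
cc_prepare_aead_data_mlli() repeatedly makes the same decision: if the ICV is fragmented across scatterlist entries it is redirected to the contiguous mac_buf/backup_mac bounce buffers, otherwise it is addressed in place at the tail of the last entry. A sketch of the in-place case, assuming the caller already knows the last entry and how many payload bytes it holds (parameter names are illustrative):

#include <linux/scatterlist.h>

/*
 * Sketch: when the ICV is not fragmented, point at it directly inside the
 * last SG entry, mirroring the sg_dma_address()/sg_virt() + offset
 * arithmetic in cc_prepare_aead_data_mlli().
 */
static void sketch_icv_in_place(struct scatterlist *last_sg,
				unsigned int bytes_in_last_sg,
				unsigned int authsize,
				dma_addr_t *icv_dma_addr, u8 **icv_virt_addr)
{
	unsigned int icv_offset = bytes_in_last_sg - authsize;

	*icv_dma_addr = sg_dma_address(last_sg) + icv_offset;
	*icv_virt_addr = (u8 *)sg_virt(last_sg) + icv_offset;
}
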
1013 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_aead_chain_data() local
1015 enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type; in cc_aead_chain_data()
1016 unsigned int authsize = areq_ctx->req_authsize; in cc_aead_chain_data()
1026 bool is_gcm4543 = areq_ctx->is_gcm4543; in cc_aead_chain_data()
1037 areq_ctx->src_sgl = req->src; in cc_aead_chain_data()
1038 areq_ctx->dst_sgl = req->dst; in cc_aead_chain_data()
1047 sg_index = areq_ctx->src_sgl->length; in cc_aead_chain_data()
1050 offset -= areq_ctx->src_sgl->length; in cc_aead_chain_data()
1051 areq_ctx->src_sgl = sg_next(areq_ctx->src_sgl); in cc_aead_chain_data()
1053 if (!areq_ctx->src_sgl) { in cc_aead_chain_data()
1057 sg_index += areq_ctx->src_sgl->length; in cc_aead_chain_data()
1066 areq_ctx->src.nents = src_mapped_nents; in cc_aead_chain_data()
1068 areq_ctx->src_offset = offset; in cc_aead_chain_data()
1078 &areq_ctx->dst.nents, in cc_aead_chain_data()
1089 sg_index = areq_ctx->dst_sgl->length; in cc_aead_chain_data()
1094 offset -= areq_ctx->dst_sgl->length; in cc_aead_chain_data()
1095 areq_ctx->dst_sgl = sg_next(areq_ctx->dst_sgl); in cc_aead_chain_data()
1097 if (!areq_ctx->dst_sgl) { in cc_aead_chain_data()
1101 sg_index += areq_ctx->dst_sgl->length; in cc_aead_chain_data()
1109 areq_ctx->dst.nents = dst_mapped_nents; in cc_aead_chain_data()
1110 areq_ctx->dst_offset = offset; in cc_aead_chain_data()
1114 areq_ctx->data_buff_type = CC_DMA_BUF_MLLI; in cc_aead_chain_data()
1119 areq_ctx->data_buff_type = CC_DMA_BUF_DLLI; in cc_aead_chain_data()
1131 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_update_aead_mlli_nents() local
1134 if (areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI) { in cc_update_aead_mlli_nents()
1135 areq_ctx->assoc.sram_addr = drvdata->mlli_sram_addr; in cc_update_aead_mlli_nents()
1136 curr_mlli_size = areq_ctx->assoc.mlli_nents * in cc_update_aead_mlli_nents()
1140 if (areq_ctx->data_buff_type == CC_DMA_BUF_MLLI) { in cc_update_aead_mlli_nents()
1143 areq_ctx->dst.mlli_nents = areq_ctx->src.mlli_nents; in cc_update_aead_mlli_nents()
1144 areq_ctx->src.sram_addr = drvdata->mlli_sram_addr + in cc_update_aead_mlli_nents()
1146 areq_ctx->dst.sram_addr = areq_ctx->src.sram_addr; in cc_update_aead_mlli_nents()
1147 if (!areq_ctx->is_single_pass) in cc_update_aead_mlli_nents()
1148 areq_ctx->assoc.mlli_nents += in cc_update_aead_mlli_nents()
1149 areq_ctx->src.mlli_nents; in cc_update_aead_mlli_nents()
1151 if (areq_ctx->gen_ctx.op_type == in cc_update_aead_mlli_nents()
1153 areq_ctx->src.sram_addr = in cc_update_aead_mlli_nents()
1156 areq_ctx->dst.sram_addr = in cc_update_aead_mlli_nents()
1157 areq_ctx->src.sram_addr + in cc_update_aead_mlli_nents()
1158 areq_ctx->src.mlli_nents * in cc_update_aead_mlli_nents()
1160 if (!areq_ctx->is_single_pass) in cc_update_aead_mlli_nents()
1161 areq_ctx->assoc.mlli_nents += in cc_update_aead_mlli_nents()
1162 areq_ctx->src.mlli_nents; in cc_update_aead_mlli_nents()
1164 areq_ctx->dst.sram_addr = in cc_update_aead_mlli_nents()
1167 areq_ctx->src.sram_addr = in cc_update_aead_mlli_nents()
1168 areq_ctx->dst.sram_addr + in cc_update_aead_mlli_nents()
1169 areq_ctx->dst.mlli_nents * in cc_update_aead_mlli_nents()
1171 if (!areq_ctx->is_single_pass) in cc_update_aead_mlli_nents()
1172 areq_ctx->assoc.mlli_nents += in cc_update_aead_mlli_nents()
1173 areq_ctx->dst.mlli_nents; in cc_update_aead_mlli_nents()
1181 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_map_aead_request() local
1182 struct mlli_params *mlli_params = &areq_ctx->mlli_params; in cc_map_aead_request()
1185 unsigned int authsize = areq_ctx->req_authsize; in cc_map_aead_request()
1189 bool is_gcm4543 = areq_ctx->is_gcm4543; in cc_map_aead_request()
1203 areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT && in cc_map_aead_request()
1208 areq_ctx->cryptlen = (areq_ctx->gen_ctx.op_type == in cc_map_aead_request()
1213 dma_addr = dma_map_single(dev, areq_ctx->mac_buf, MAX_MAC_SIZE, in cc_map_aead_request()
1217 MAX_MAC_SIZE, areq_ctx->mac_buf); in cc_map_aead_request()
1221 areq_ctx->mac_buf_dma_addr = dma_addr; in cc_map_aead_request()
1223 if (areq_ctx->ccm_hdr_size != ccm_header_size_null) { in cc_map_aead_request()
1224 void *addr = areq_ctx->ccm_config + CCM_CTR_COUNT_0_OFFSET; in cc_map_aead_request()
1232 areq_ctx->ccm_iv0_dma_addr = 0; in cc_map_aead_request()
1236 areq_ctx->ccm_iv0_dma_addr = dma_addr; in cc_map_aead_request()
1238 if (cc_set_aead_conf_buf(dev, areq_ctx, areq_ctx->ccm_config, in cc_map_aead_request()
1245 if (areq_ctx->cipher_mode == DRV_CIPHER_GCTR) { in cc_map_aead_request()
1246 dma_addr = dma_map_single(dev, areq_ctx->hkey, AES_BLOCK_SIZE, in cc_map_aead_request()
1250 AES_BLOCK_SIZE, areq_ctx->hkey); in cc_map_aead_request()
1254 areq_ctx->hkey_dma_addr = dma_addr; in cc_map_aead_request()
1256 dma_addr = dma_map_single(dev, &areq_ctx->gcm_len_block, in cc_map_aead_request()
1260 AES_BLOCK_SIZE, &areq_ctx->gcm_len_block); in cc_map_aead_request()
1264 areq_ctx->gcm_block_len_dma_addr = dma_addr; in cc_map_aead_request()
1266 dma_addr = dma_map_single(dev, areq_ctx->gcm_iv_inc1, in cc_map_aead_request()
1271 AES_BLOCK_SIZE, (areq_ctx->gcm_iv_inc1)); in cc_map_aead_request()
1272 areq_ctx->gcm_iv_inc1_dma_addr = 0; in cc_map_aead_request()
1276 areq_ctx->gcm_iv_inc1_dma_addr = dma_addr; in cc_map_aead_request()
1278 dma_addr = dma_map_single(dev, areq_ctx->gcm_iv_inc2, in cc_map_aead_request()
1283 AES_BLOCK_SIZE, (areq_ctx->gcm_iv_inc2)); in cc_map_aead_request()
1284 areq_ctx->gcm_iv_inc2_dma_addr = 0; in cc_map_aead_request()
1288 areq_ctx->gcm_iv_inc2_dma_addr = dma_addr; in cc_map_aead_request()
1292 if (areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_ENCRYPT) in cc_map_aead_request()
1298 &areq_ctx->src.nents, in cc_map_aead_request()
1307 if (areq_ctx->is_single_pass) { in cc_map_aead_request()
1358 if (areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI || in cc_map_aead_request()
1359 areq_ctx->data_buff_type == CC_DMA_BUF_MLLI) { in cc_map_aead_request()
1367 areq_ctx->assoc.mlli_nents); in cc_map_aead_request()
1368 dev_dbg(dev, "src params mn %d\n", areq_ctx->src.mlli_nents); in cc_map_aead_request()
1369 dev_dbg(dev, "dst params mn %d\n", areq_ctx->dst.mlli_nents); in cc_map_aead_request()
1382 struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx; in cc_map_hash_request_final() local
1384 u8 *curr_buff = cc_hash_buf(areq_ctx); in cc_map_hash_request_final()
1385 u32 *curr_buff_cnt = cc_hash_buf_cnt(areq_ctx); in cc_map_hash_request_final()
1386 struct mlli_params *mlli_params = &areq_ctx->mlli_params; in cc_map_hash_request_final()
1393 curr_buff, *curr_buff_cnt, nbytes, src, areq_ctx->buff_index); in cc_map_hash_request_final()
1395 areq_ctx->data_dma_buf_type = CC_DMA_BUF_NULL; in cc_map_hash_request_final()
1398 areq_ctx->in_nents = 0; in cc_map_hash_request_final()
1408 if (cc_set_hash_buf(dev, areq_ctx, curr_buff, *curr_buff_cnt, in cc_map_hash_request_final()
1416 &areq_ctx->in_nents, LLI_MAX_NUM_OF_DATA_ENTRIES, in cc_map_hash_request_final()
1421 areq_ctx->data_dma_buf_type == CC_DMA_BUF_NULL) { in cc_map_hash_request_final()
1422 memcpy(areq_ctx->buff_sg, src, in cc_map_hash_request_final()
1424 areq_ctx->buff_sg->length = nbytes; in cc_map_hash_request_final()
1425 areq_ctx->curr_sg = areq_ctx->buff_sg; in cc_map_hash_request_final()
1426 areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI; in cc_map_hash_request_final()
1428 areq_ctx->data_dma_buf_type = CC_DMA_BUF_MLLI; in cc_map_hash_request_final()
1433 if (areq_ctx->data_dma_buf_type == CC_DMA_BUF_MLLI) { in cc_map_hash_request_final()
1436 cc_add_sg_entry(dev, &sg_data, areq_ctx->in_nents, src, nbytes, in cc_map_hash_request_final()
1437 0, true, &areq_ctx->mlli_nents); in cc_map_hash_request_final()
1442 areq_ctx->buff_index = (areq_ctx->buff_index ^ 1); in cc_map_hash_request_final()
1444 cc_dma_buf_type(areq_ctx->data_dma_buf_type)); in cc_map_hash_request_final()
1448 dma_unmap_sg(dev, src, areq_ctx->in_nents, DMA_TO_DEVICE); in cc_map_hash_request_final()
1452 dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE); in cc_map_hash_request_final()
1461 struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx; in cc_map_hash_request_update() local
1463 u8 *curr_buff = cc_hash_buf(areq_ctx); in cc_map_hash_request_update()
1464 u32 *curr_buff_cnt = cc_hash_buf_cnt(areq_ctx); in cc_map_hash_request_update()
1465 u8 *next_buff = cc_next_buf(areq_ctx); in cc_map_hash_request_update()
1466 u32 *next_buff_cnt = cc_next_buf_cnt(areq_ctx); in cc_map_hash_request_update()
1467 struct mlli_params *mlli_params = &areq_ctx->mlli_params; in cc_map_hash_request_update()
1477 curr_buff, *curr_buff_cnt, nbytes, src, areq_ctx->buff_index); in cc_map_hash_request_update()
1479 areq_ctx->data_dma_buf_type = CC_DMA_BUF_NULL; in cc_map_hash_request_update()
1481 areq_ctx->curr_sg = NULL; in cc_map_hash_request_update()
1483 areq_ctx->in_nents = 0; in cc_map_hash_request_update()
1488 areq_ctx->in_nents = in cc_map_hash_request_update()
1490 sg_copy_to_buffer(src, areq_ctx->in_nents, in cc_map_hash_request_update()
1517 if (cc_set_hash_buf(dev, areq_ctx, curr_buff, *curr_buff_cnt, in cc_map_hash_request_update()
1527 DMA_TO_DEVICE, &areq_ctx->in_nents, in cc_map_hash_request_update()
1533 areq_ctx->data_dma_buf_type == CC_DMA_BUF_NULL) { in cc_map_hash_request_update()
1535 memcpy(areq_ctx->buff_sg, src, in cc_map_hash_request_update()
1537 areq_ctx->buff_sg->length = update_data_len; in cc_map_hash_request_update()
1538 areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI; in cc_map_hash_request_update()
1539 areq_ctx->curr_sg = areq_ctx->buff_sg; in cc_map_hash_request_update()
1541 areq_ctx->data_dma_buf_type = CC_DMA_BUF_MLLI; in cc_map_hash_request_update()
1545 if (areq_ctx->data_dma_buf_type == CC_DMA_BUF_MLLI) { in cc_map_hash_request_update()
1548 cc_add_sg_entry(dev, &sg_data, areq_ctx->in_nents, src, in cc_map_hash_request_update()
1550 &areq_ctx->mlli_nents); in cc_map_hash_request_update()
1554 areq_ctx->buff_index = (areq_ctx->buff_index ^ swap_index); in cc_map_hash_request_update()
1559 dma_unmap_sg(dev, src, areq_ctx->in_nents, DMA_TO_DEVICE); in cc_map_hash_request_update()
1563 dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE); in cc_map_hash_request_update()
1571 struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx; in cc_unmap_hash_request() local
1572 u32 *prev_len = cc_next_buf_cnt(areq_ctx); in cc_unmap_hash_request()
1577 if (areq_ctx->mlli_params.curr_pool) { in cc_unmap_hash_request()
1579 &areq_ctx->mlli_params.mlli_dma_addr, in cc_unmap_hash_request()
1580 areq_ctx->mlli_params.mlli_virt_addr); in cc_unmap_hash_request()
1581 dma_pool_free(areq_ctx->mlli_params.curr_pool, in cc_unmap_hash_request()
1582 areq_ctx->mlli_params.mlli_virt_addr, in cc_unmap_hash_request()
1583 areq_ctx->mlli_params.mlli_dma_addr); in cc_unmap_hash_request()
1586 if (src && areq_ctx->in_nents) { in cc_unmap_hash_request()
1590 areq_ctx->in_nents, DMA_TO_DEVICE); in cc_unmap_hash_request()
1595 sg_virt(areq_ctx->buff_sg), in cc_unmap_hash_request()
1596 &sg_dma_address(areq_ctx->buff_sg), in cc_unmap_hash_request()
1597 sg_dma_len(areq_ctx->buff_sg)); in cc_unmap_hash_request()
1598 dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE); in cc_unmap_hash_request()
1605 areq_ctx->buff_index ^= 1; in cc_unmap_hash_request()
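
Both unmap paths, cc_unmap_aead_request() and cc_unmap_hash_request(), finish by returning the MLLI link-list table to its dma_pool, keyed by the curr_pool / mlli_virt_addr / mlli_dma_addr triplet. A self-contained sketch of that allocation lifecycle, with an invented pool name and geometry:

#include <linux/dmapool.h>

/*
 * Sketch: lifecycle of an MLLI-style table carved from a dma_pool,
 * mirroring the curr_pool / mlli_virt_addr / mlli_dma_addr triplet that
 * the unmap paths in the listing release. Pool size and alignment here
 * are illustrative only.
 */
static int sketch_mlli_pool_lifecycle(struct device *dev)
{
	struct dma_pool *pool;
	void *virt;
	dma_addr_t dma;

	pool = dma_pool_create("mlli-demo", dev, 4096, 8, 0);
	if (!pool)
		return -ENOMEM;

	virt = dma_pool_alloc(pool, GFP_KERNEL, &dma);
	if (!virt) {
		dma_pool_destroy(pool);
		return -ENOMEM;
	}

	/* ... build the link-list table at 'virt', hand 'dma' to the HW ... */

	dma_pool_free(pool, virt, dma);	/* same argument order as the unmap paths */
	dma_pool_destroy(pool);
	return 0;
}
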