Lines Matching +full:dma +full:- +full:maxburst

1 // SPDX-License-Identifier: GPL-2.0
7 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
10 * Some ideas are from omap-aes.c driver.
30 #include <linux/dma-mapping.h>
41 #include "atmel-aes-regs.h"
42 #include "atmel-authenc.h"
265 snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2); in atmel_aes_reg_name()
272 snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2); in atmel_aes_reg_name()
279 snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2); in atmel_aes_reg_name()
286 snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2); in atmel_aes_reg_name()
299 snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2); in atmel_aes_reg_name()
306 snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2); in atmel_aes_reg_name()
316 snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2); in atmel_aes_reg_name()
326 snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2); in atmel_aes_reg_name()
333 snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2); in atmel_aes_reg_name()
349 u32 value = readl_relaxed(dd->io_base + offset); in atmel_aes_read()
352 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_read()
355 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value, in atmel_aes_read()
367 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_write()
370 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value, in atmel_aes_write()
375 writel_relaxed(value, dd->io_base + offset); in atmel_aes_write()
381 for (; count--; value++, offset += 4) in atmel_aes_read_n()
388 for (; count--; value++, offset += 4) in atmel_aes_write_n()
412 dd->resume = resume; in atmel_aes_wait_for_data_ready()
414 return -EINPROGRESS; in atmel_aes_wait_for_data_ready()
419 len &= block_size - 1; in atmel_aes_padlen()
420 return len ? block_size - len : 0; in atmel_aes_padlen()
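The atmel_aes_padlen() hit above rounds a transfer length up to the hardware block size. A minimal standalone sketch of the same power-of-two arithmetic, with a worked example in the comment (illustrative name, not driver code):

/*
 * Pad len up to the next multiple of block_size (block_size must be a
 * power of two). Example: len = 20, block_size = 16 leaves a remainder
 * of 4, so 12 bytes of padding are needed.
 */
static unsigned int example_padlen(unsigned int len, unsigned int block_size)
{
	len &= block_size - 1;			/* remainder modulo block_size */
	return len ? block_size - len : 0;	/* 0 when already aligned */
}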
439 err = clk_enable(dd->iclk); in atmel_aes_hw_init()
462 dd->hw_version = atmel_aes_get_version(dd); in atmel_aes_hw_version_init()
464 dev_info(dd->dev, "version: 0x%x\n", dd->hw_version); in atmel_aes_hw_version_init()
466 clk_disable(dd->iclk); in atmel_aes_hw_version_init()
474 dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode; in atmel_aes_set_mode()
479 return (dd->flags & AES_FLAGS_ENCRYPT); in atmel_aes_is_encrypt()
488 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_set_iv_as_last_ciphertext_block()
493 if (req->cryptlen < ivsize) in atmel_aes_set_iv_as_last_ciphertext_block()
496 if (rctx->mode & AES_FLAGS_ENCRYPT) { in atmel_aes_set_iv_as_last_ciphertext_block()
497 scatterwalk_map_and_copy(req->iv, req->dst, in atmel_aes_set_iv_as_last_ciphertext_block()
498 req->cryptlen - ivsize, ivsize, 0); in atmel_aes_set_iv_as_last_ciphertext_block()
500 if (req->src == req->dst) in atmel_aes_set_iv_as_last_ciphertext_block()
501 memcpy(req->iv, rctx->lastc, ivsize); in atmel_aes_set_iv_as_last_ciphertext_block()
503 scatterwalk_map_and_copy(req->iv, req->src, in atmel_aes_set_iv_as_last_ciphertext_block()
504 req->cryptlen - ivsize, in atmel_aes_set_iv_as_last_ciphertext_block()
517 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_update_req_iv()
518 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_update_req_iv()
526 * here, ctx->blocks contains the number of blocks of the last fragment in atmel_aes_ctr_update_req_iv()
529 for (i = 0; i < ctx->blocks; i++) in atmel_aes_ctr_update_req_iv()
530 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_update_req_iv()
532 memcpy(req->iv, ctx->iv, ivsize); in atmel_aes_ctr_update_req_iv()
537 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_complete()
541 if (dd->ctx->is_aead) in atmel_aes_complete()
545 clk_disable(dd->iclk); in atmel_aes_complete()
546 dd->flags &= ~AES_FLAGS_BUSY; in atmel_aes_complete()
548 if (!err && !dd->ctx->is_aead && in atmel_aes_complete()
549 (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB) { in atmel_aes_complete()
550 if ((rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_CTR) in atmel_aes_complete()
556 if (dd->is_async) in atmel_aes_complete()
557 dd->areq->complete(dd->areq, err); in atmel_aes_complete()
559 tasklet_schedule(&dd->queue_task); in atmel_aes_complete()
577 valmr |= dd->flags & AES_FLAGS_MODE_MASK; in atmel_aes_write_ctrl_key()
581 if (dd->caps.has_dualbuff) in atmel_aes_write_ctrl_key()
600 dd->ctx->key, dd->ctx->keylen); in atmel_aes_write_ctrl()
611 atmel_aes_read_block(dd, AES_ODATAR(0), dd->data); in atmel_aes_cpu_transfer()
612 dd->data += 4; in atmel_aes_cpu_transfer()
613 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
615 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
618 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_transfer()
622 dd->resume = atmel_aes_cpu_transfer; in atmel_aes_cpu_transfer()
624 return -EINPROGRESS; in atmel_aes_cpu_transfer()
628 if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_cpu_transfer()
629 dd->buf, dd->total)) in atmel_aes_cpu_transfer()
630 err = -EINVAL; in atmel_aes_cpu_transfer()
635 return dd->cpu_transfer_complete(dd); in atmel_aes_cpu_transfer()
647 return -EINVAL; in atmel_aes_cpu_start()
649 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_cpu_start()
651 dd->total = len; in atmel_aes_cpu_start()
652 dd->real_dst = dst; in atmel_aes_cpu_start()
653 dd->cpu_transfer_complete = resume; in atmel_aes_cpu_start()
654 dd->datalen = len + padlen; in atmel_aes_cpu_start()
655 dd->data = (u32 *)dd->buf; in atmel_aes_cpu_start()
656 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_start()
661 /* DMA transfer */
668 struct atmel_aes_dma *dma) in atmel_aes_check_aligned() argument
672 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
676 if (!IS_ALIGNED(sg->offset, sizeof(u32))) in atmel_aes_check_aligned()
679 if (len <= sg->length) { in atmel_aes_check_aligned()
680 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
683 dma->nents = nents+1; in atmel_aes_check_aligned()
684 dma->remainder = sg->length - len; in atmel_aes_check_aligned()
685 sg->length = len; in atmel_aes_check_aligned()
689 if (!IS_ALIGNED(sg->length, dd->ctx->block_size)) in atmel_aes_check_aligned()
692 len -= sg->length; in atmel_aes_check_aligned()
698 static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma) in atmel_aes_restore_sg() argument
700 struct scatterlist *sg = dma->sg; in atmel_aes_restore_sg()
701 int nents = dma->nents; in atmel_aes_restore_sg()
703 if (!dma->remainder) in atmel_aes_restore_sg()
706 while (--nents > 0 && sg) in atmel_aes_restore_sg()
712 sg->length += dma->remainder; in atmel_aes_restore_sg()
723 dd->total = len; in atmel_aes_map()
724 dd->src.sg = src; in atmel_aes_map()
725 dd->dst.sg = dst; in atmel_aes_map()
726 dd->real_dst = dst; in atmel_aes_map()
728 src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src); in atmel_aes_map()
732 dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst); in atmel_aes_map()
734 padlen = atmel_aes_padlen(len, dd->ctx->block_size); in atmel_aes_map()
736 if (dd->buflen < len + padlen) in atmel_aes_map()
737 return -ENOMEM; in atmel_aes_map()
740 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_map()
741 dd->src.sg = &dd->aligned_sg; in atmel_aes_map()
742 dd->src.nents = 1; in atmel_aes_map()
743 dd->src.remainder = 0; in atmel_aes_map()
747 dd->dst.sg = &dd->aligned_sg; in atmel_aes_map()
748 dd->dst.nents = 1; in atmel_aes_map()
749 dd->dst.remainder = 0; in atmel_aes_map()
752 sg_init_table(&dd->aligned_sg, 1); in atmel_aes_map()
753 sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen); in atmel_aes_map()
756 if (dd->src.sg == dd->dst.sg) { in atmel_aes_map()
757 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
759 dd->dst.sg_len = dd->src.sg_len; in atmel_aes_map()
760 if (!dd->src.sg_len) in atmel_aes_map()
761 return -EFAULT; in atmel_aes_map()
763 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
765 if (!dd->src.sg_len) in atmel_aes_map()
766 return -EFAULT; in atmel_aes_map()
768 dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_map()
770 if (!dd->dst.sg_len) { in atmel_aes_map()
771 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
773 return -EFAULT; in atmel_aes_map()
782 if (dd->src.sg == dd->dst.sg) { in atmel_aes_unmap()
783 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
786 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
787 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
789 dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_unmap()
792 if (dd->dst.sg != &dd->aligned_sg) in atmel_aes_unmap()
793 atmel_aes_restore_sg(&dd->dst); in atmel_aes_unmap()
795 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
798 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
799 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
802 if (dd->dst.sg == &dd->aligned_sg) in atmel_aes_unmap()
803 sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_unmap()
804 dd->buf, dd->total); in atmel_aes_unmap()
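The atmel_aes_map()/atmel_aes_unmap() hits above follow the standard streaming-DMA pattern for scatterlists: map before handing the buffers to the engine, unmap afterwards, and use a single DMA_BIDIRECTIONAL mapping when source and destination are the same list. A minimal sketch of that pattern with generic names (not the driver's exact code; the nents counts are assumed to be known to the caller, and a real driver would keep the mapped segment counts returned by dma_map_sg() for the later dmaengine call):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/* Map src/dst scatterlists for a device-driven transfer. */
static int example_map(struct device *dev,
		       struct scatterlist *src, int src_nents,
		       struct scatterlist *dst, int dst_nents)
{
	if (src == dst)		/* in-place: one bidirectional mapping is enough */
		return dma_map_sg(dev, src, src_nents, DMA_BIDIRECTIONAL) ? 0 : -EFAULT;

	if (!dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE))
		return -EFAULT;

	if (!dma_map_sg(dev, dst, dst_nents, DMA_FROM_DEVICE)) {
		dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		return -EFAULT;
	}
	return 0;
}

/* Teardown mirrors the mapping directions used above. */
static void example_unmap(struct device *dev,
			  struct scatterlist *src, int src_nents,
			  struct scatterlist *dst, int dst_nents)
{
	if (src == dst) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
		dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
	}
}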
810 u32 maxburst) in atmel_aes_dma_transfer_start() argument
815 struct atmel_aes_dma *dma; in atmel_aes_dma_transfer_start() local
821 config.src_maxburst = maxburst; in atmel_aes_dma_transfer_start()
822 config.dst_maxburst = maxburst; in atmel_aes_dma_transfer_start()
826 dma = &dd->src; in atmel_aes_dma_transfer_start()
828 config.dst_addr = dd->phys_base + AES_IDATAR(0); in atmel_aes_dma_transfer_start()
832 dma = &dd->dst; in atmel_aes_dma_transfer_start()
834 config.src_addr = dd->phys_base + AES_ODATAR(0); in atmel_aes_dma_transfer_start()
838 return -EINVAL; in atmel_aes_dma_transfer_start()
841 err = dmaengine_slave_config(dma->chan, &config); in atmel_aes_dma_transfer_start()
845 desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir, in atmel_aes_dma_transfer_start()
848 return -ENOMEM; in atmel_aes_dma_transfer_start()
850 desc->callback = callback; in atmel_aes_dma_transfer_start()
851 desc->callback_param = dd; in atmel_aes_dma_transfer_start()
853 dma_async_issue_pending(dma->chan); in atmel_aes_dma_transfer_start()
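These atmel_aes_dma_transfer_start() hits are where the two search terms actually meet: the requested maxburst is written into both src_maxburst and dst_maxburst of a struct dma_slave_config before the mapped scatterlist is queued on the channel. A condensed sketch of that standard dmaengine sequence for the memory-to-device direction (generic names, fixed 32-bit bus width, error handling trimmed; not the driver's exact code):

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/* Queue one mem-to-device slave transfer with the given burst size. */
static int example_dma_tx(struct dma_chan *chan, struct scatterlist *sg,
			  unsigned int sg_len, dma_addr_t fifo_addr,
			  u32 maxburst, dma_async_tx_callback callback,
			  void *cb_param)
{
	struct dma_slave_config cfg = {
		.dst_addr	= fifo_addr,			/* peripheral input register */
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.src_maxburst	= maxburst,			/* words per burst */
		.dst_maxburst	= maxburst,
	};
	struct dma_async_tx_descriptor *desc;
	int err;

	err = dmaengine_slave_config(chan, &cfg);
	if (err)
		return err;

	desc = dmaengine_prep_slave_sg(chan, sg, sg_len, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = callback;			/* completion handler */
	desc->callback_param = cb_param;
	dmaengine_submit(desc);
	dma_async_issue_pending(chan);			/* kick off the queued work */
	return 0;
}

The device-to-memory direction is symmetric: src_addr points at the peripheral output register and the direction becomes DMA_DEV_TO_MEM.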
865 u32 maxburst; in atmel_aes_dma_start() local
868 switch (dd->ctx->block_size) { in atmel_aes_dma_start()
871 maxburst = 1; in atmel_aes_dma_start()
876 maxburst = 1; in atmel_aes_dma_start()
882 maxburst = 1; in atmel_aes_dma_start()
887 maxburst = dd->caps.max_burst_size; in atmel_aes_dma_start()
891 err = -EINVAL; in atmel_aes_dma_start()
899 dd->resume = resume; in atmel_aes_dma_start()
901 /* Set output DMA transfer first */ in atmel_aes_dma_start()
903 maxburst); in atmel_aes_dma_start()
907 /* Then set input DMA transfer */ in atmel_aes_dma_start()
909 maxburst); in atmel_aes_dma_start()
913 return -EINPROGRESS; in atmel_aes_dma_start()
916 dmaengine_terminate_sync(dd->dst.chan); in atmel_aes_dma_start()
928 dd->is_async = true; in atmel_aes_dma_callback()
929 (void)dd->resume(dd); in atmel_aes_dma_callback()
941 spin_lock_irqsave(&dd->lock, flags); in atmel_aes_handle_queue()
943 ret = crypto_enqueue_request(&dd->queue, new_areq); in atmel_aes_handle_queue()
944 if (dd->flags & AES_FLAGS_BUSY) { in atmel_aes_handle_queue()
945 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
948 backlog = crypto_get_backlog(&dd->queue); in atmel_aes_handle_queue()
949 areq = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
951 dd->flags |= AES_FLAGS_BUSY; in atmel_aes_handle_queue()
952 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
958 backlog->complete(backlog, -EINPROGRESS); in atmel_aes_handle_queue()
960 ctx = crypto_tfm_ctx(areq->tfm); in atmel_aes_handle_queue()
962 dd->areq = areq; in atmel_aes_handle_queue()
964 dd->is_async = start_async; in atmel_aes_handle_queue()
966 /* WARNING: ctx->start() MAY change dd->is_async. */ in atmel_aes_handle_queue()
967 err = ctx->start(dd); in atmel_aes_handle_queue()
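atmel_aes_handle_queue() above is the usual single-engine dispatch loop: requests go onto a struct crypto_queue under a spinlock, and one request at a time is dequeued and started while a BUSY flag is held. A rough sketch of the dequeue half under those assumptions (illustrative struct and names; the backlog completion call matches the backlog->complete(..., -EINPROGRESS) form visible in this listing, which newer kernels spell crypto_request_complete()):

#include <crypto/algapi.h>
#include <linux/spinlock.h>
#include <linux/errno.h>

struct example_engine {
	spinlock_t lock;
	struct crypto_queue queue;
	bool busy;
};

static struct crypto_async_request *example_dequeue(struct example_engine *eng)
{
	struct crypto_async_request *backlog = NULL, *areq = NULL;
	unsigned long flags;

	spin_lock_irqsave(&eng->lock, flags);
	if (!eng->busy) {			/* engine idle: pick the next request */
		backlog = crypto_get_backlog(&eng->queue);
		areq = crypto_dequeue_request(&eng->queue);
		eng->busy = areq != NULL;
	}
	spin_unlock_irqrestore(&eng->lock, flags);

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);	/* backlogged request is now in flight */

	return areq;	/* caller runs its ctx->start()-style processing and clears busy when done */
}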
981 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_start()
983 bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD || in atmel_aes_start()
984 dd->ctx->block_size != AES_BLOCK_SIZE); in atmel_aes_start()
993 atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv); in atmel_aes_start()
995 return atmel_aes_dma_start(dd, req->src, req->dst, in atmel_aes_start()
996 req->cryptlen, in atmel_aes_start()
999 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen, in atmel_aes_start()
1005 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_transfer()
1006 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_transfer()
1014 ctx->offset += dd->total; in atmel_aes_ctr_transfer()
1015 if (ctx->offset >= req->cryptlen) in atmel_aes_ctr_transfer()
1019 datalen = req->cryptlen - ctx->offset; in atmel_aes_ctr_transfer()
1020 ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE); in atmel_aes_ctr_transfer()
1021 ctr = be32_to_cpu(ctx->iv[3]); in atmel_aes_ctr_transfer()
1025 end = start + ctx->blocks - 1; in atmel_aes_ctr_transfer()
1027 if (ctx->blocks >> 16 || end < start) { in atmel_aes_ctr_transfer()
1029 datalen = AES_BLOCK_SIZE * (0x10000 - start); in atmel_aes_ctr_transfer()
1036 src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset); in atmel_aes_ctr_transfer()
1037 dst = ((req->src == req->dst) ? src : in atmel_aes_ctr_transfer()
1038 scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset)); in atmel_aes_ctr_transfer()
1041 atmel_aes_write_ctrl(dd, use_dma, ctx->iv); in atmel_aes_ctr_transfer()
1047 ctx->iv[3] = cpu_to_be32(ctr); in atmel_aes_ctr_transfer()
1048 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_transfer()
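The atmel_aes_ctr_transfer() hits above split a CTR request so that the low 16 bits of the big-endian counter do not wrap within a single hardware pass; ctx->blocks is the number of AES blocks left and ctx->iv[3] holds the counter word. A small worked sketch of just that overflow check, using 16-bit truncation for the comparison the way it reads above (hypothetical helper name):

#include <linux/types.h>

/*
 * How many blocks can run before the 16-bit counter field wraps?
 * Example: low half of ctr = 0xfffe with 5 blocks requested -> only 2 fit,
 * so the request is processed in two fragments.
 */
static unsigned int example_blocks_before_wrap(u32 ctr, unsigned int blocks)
{
	u16 start = ctr & 0xffff;
	u16 end = start + blocks - 1;		/* truncates to 16 bits */

	if (blocks >> 16 || end < start)	/* counter would cross 0xffff */
		return 0x10000 - start;
	return blocks;				/* everything fits, no split needed */
}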
1061 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_start()
1062 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_start()
1072 memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_start()
1073 ctx->offset = 0; in atmel_aes_ctr_start()
1074 dd->total = 0; in atmel_aes_ctr_start()
1084 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in atmel_aes_xts_fallback()
1085 skcipher_request_set_callback(&rctx->fallback_req, req->base.flags, in atmel_aes_xts_fallback()
1086 req->base.complete, req->base.data); in atmel_aes_xts_fallback()
1087 skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst, in atmel_aes_xts_fallback()
1088 req->cryptlen, req->iv); in atmel_aes_xts_fallback()
1090 return enc ? crypto_skcipher_encrypt(&rctx->fallback_req) : in atmel_aes_xts_fallback()
1091 crypto_skcipher_decrypt(&rctx->fallback_req); in atmel_aes_xts_fallback()
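atmel_aes_xts_fallback() above hands the whole request to a software skcipher when the hardware path cannot be used, copying the original request's flags, completion callback, buffers and IV onto the fallback request. The same hand-off in isolation, using only the public skcipher API (the fallback tfm and the storage for the sub-request are assumed to have been set up at init time):

#include <crypto/skcipher.h>
#include <linux/types.h>

/* Re-issue @req on @fallback via @subreq (storage reserved by the caller). */
static int example_fallback_crypt(struct crypto_skcipher *fallback,
				  struct skcipher_request *req,
				  struct skcipher_request *subreq, bool enc)
{
	skcipher_request_set_tfm(subreq, fallback);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}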
1102 if (req->cryptlen < XTS_BLOCK_SIZE) in atmel_aes_crypt()
1103 return -EINVAL; in atmel_aes_crypt()
1105 if (!IS_ALIGNED(req->cryptlen, XTS_BLOCK_SIZE)) in atmel_aes_crypt()
1114 if (!req->cryptlen && opmode != AES_FLAGS_XTS) in atmel_aes_crypt()
1118 !IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(skcipher))) in atmel_aes_crypt()
1119 return -EINVAL; in atmel_aes_crypt()
1123 ctx->block_size = CFB8_BLOCK_SIZE; in atmel_aes_crypt()
1127 ctx->block_size = CFB16_BLOCK_SIZE; in atmel_aes_crypt()
1131 ctx->block_size = CFB32_BLOCK_SIZE; in atmel_aes_crypt()
1135 ctx->block_size = CFB64_BLOCK_SIZE; in atmel_aes_crypt()
1139 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_crypt()
1142 ctx->is_aead = false; in atmel_aes_crypt()
1145 rctx->mode = mode; in atmel_aes_crypt()
1148 !(mode & AES_FLAGS_ENCRYPT) && req->src == req->dst) { in atmel_aes_crypt()
1151 if (req->cryptlen >= ivsize) in atmel_aes_crypt()
1152 scatterwalk_map_and_copy(rctx->lastc, req->src, in atmel_aes_crypt()
1153 req->cryptlen - ivsize, in atmel_aes_crypt()
1157 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_crypt()
1168 return -EINVAL; in atmel_aes_setkey()
1170 memcpy(ctx->key, key, keylen); in atmel_aes_setkey()
1171 ctx->keylen = keylen; in atmel_aes_setkey()
1271 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_init_tfm()
1273 return -ENODEV; in atmel_aes_init_tfm()
1276 ctx->base.dd = dd; in atmel_aes_init_tfm()
1277 ctx->base.dd->ctx = &ctx->base; in atmel_aes_init_tfm()
1278 ctx->base.start = atmel_aes_start; in atmel_aes_init_tfm()
1288 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_ctr_init_tfm()
1290 return -ENODEV; in atmel_aes_ctr_init_tfm()
1293 ctx->base.dd = dd; in atmel_aes_ctr_init_tfm()
1294 ctx->base.dd->ctx = &ctx->base; in atmel_aes_ctr_init_tfm()
1295 ctx->base.start = atmel_aes_ctr_start; in atmel_aes_ctr_init_tfm()
1303 .base.cra_driver_name = "atmel-ecb-aes",
1316 .base.cra_driver_name = "atmel-cbc-aes",
1330 .base.cra_driver_name = "atmel-ofb-aes",
1344 .base.cra_driver_name = "atmel-cfb-aes",
1358 .base.cra_driver_name = "atmel-cfb32-aes",
1372 .base.cra_driver_name = "atmel-cfb16-aes",
1386 .base.cra_driver_name = "atmel-cfb8-aes",
1400 .base.cra_driver_name = "atmel-ctr-aes",
1416 .base.cra_driver_name = "atmel-cfb64-aes",
1458 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash()
1460 dd->data = (u32 *)data; in atmel_aes_gcm_ghash()
1461 dd->datalen = datalen; in atmel_aes_gcm_ghash()
1462 ctx->ghash_in = ghash_in; in atmel_aes_gcm_ghash()
1463 ctx->ghash_out = ghash_out; in atmel_aes_gcm_ghash()
1464 ctx->ghash_resume = resume; in atmel_aes_gcm_ghash()
1472 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_init()
1475 atmel_aes_write(dd, AES_AADLENR, dd->total); in atmel_aes_gcm_ghash_init()
1479 if (ctx->ghash_in) in atmel_aes_gcm_ghash_init()
1480 atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in); in atmel_aes_gcm_ghash_init()
1487 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_finalize()
1491 while (dd->datalen > 0) { in atmel_aes_gcm_ghash_finalize()
1492 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_ghash_finalize()
1493 dd->data += 4; in atmel_aes_gcm_ghash_finalize()
1494 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_ghash_finalize()
1498 dd->resume = atmel_aes_gcm_ghash_finalize; in atmel_aes_gcm_ghash_finalize()
1500 return -EINPROGRESS; in atmel_aes_gcm_ghash_finalize()
1505 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out); in atmel_aes_gcm_ghash_finalize()
1507 return ctx->ghash_resume(dd); in atmel_aes_gcm_ghash_finalize()
1513 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_start()
1514 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_start()
1519 const void *iv = req->iv; in atmel_aes_gcm_start()
1520 u8 *data = dd->buf; in atmel_aes_gcm_start()
1530 memcpy(ctx->j0, iv, ivsize); in atmel_aes_gcm_start()
1531 ctx->j0[3] = cpu_to_be32(1); in atmel_aes_gcm_start()
1537 if (datalen > dd->buflen) in atmel_aes_gcm_start()
1538 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_start()
1542 ((__be64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8); in atmel_aes_gcm_start()
1545 NULL, ctx->j0, atmel_aes_gcm_process); in atmel_aes_gcm_start()
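The atmel_aes_gcm_start() hits above build the GCM pre-counter block J0 as defined by NIST SP 800-38D: with a 96-bit IV, J0 is simply IV || 0x00000001; with any other IV length, J0 = GHASH(IV padded to a 16-byte boundary || 64-bit zero || 64-bit IV length in bits), which is what the padding and the cpu_to_be64(ivsize * 8) line implement before the GHASH is scheduled. A compact sketch of the simple 96-bit case only (standard-defined layout, hypothetical helper):

#include <linux/types.h>
#include <linux/string.h>
#include <asm/byteorder.h>

#define EXAMPLE_GCM_IV_SIZE 12	/* 96-bit IV, the common GCM case */

/* J0 for a 96-bit IV: the IV followed by a big-endian 1, 16 bytes total. */
static void example_gcm_j0_96(__be32 j0[4], const u8 iv[EXAMPLE_GCM_IV_SIZE])
{
	memcpy(j0, iv, EXAMPLE_GCM_IV_SIZE);
	j0[3] = cpu_to_be32(1);
}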
1550 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_process()
1551 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_process()
1558 ctx->textlen = req->cryptlen - (enc ? 0 : authsize); in atmel_aes_gcm_process()
1564 if (likely(req->assoclen != 0 || ctx->textlen != 0)) in atmel_aes_gcm_process()
1565 dd->flags |= AES_FLAGS_GTAGEN; in atmel_aes_gcm_process()
1573 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_length()
1574 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_length()
1575 __be32 j0_lsw, *j0 = ctx->j0; in atmel_aes_gcm_length()
1585 atmel_aes_write(dd, AES_AADLENR, req->assoclen); in atmel_aes_gcm_length()
1586 atmel_aes_write(dd, AES_CLENR, ctx->textlen); in atmel_aes_gcm_length()
1589 if (unlikely(req->assoclen == 0)) { in atmel_aes_gcm_length()
1590 dd->datalen = 0; in atmel_aes_gcm_length()
1595 padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE); in atmel_aes_gcm_length()
1596 if (unlikely(req->assoclen + padlen > dd->buflen)) in atmel_aes_gcm_length()
1597 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_length()
1598 sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen); in atmel_aes_gcm_length()
1601 dd->data = (u32 *)dd->buf; in atmel_aes_gcm_length()
1602 dd->datalen = req->assoclen + padlen; in atmel_aes_gcm_length()
1608 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_data()
1609 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_data()
1610 bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD); in atmel_aes_gcm_data()
1615 while (dd->datalen > 0) { in atmel_aes_gcm_data()
1616 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_data()
1617 dd->data += 4; in atmel_aes_gcm_data()
1618 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_data()
1622 dd->resume = atmel_aes_gcm_data; in atmel_aes_gcm_data()
1624 return -EINPROGRESS; in atmel_aes_gcm_data()
1629 if (unlikely(ctx->textlen == 0)) in atmel_aes_gcm_data()
1633 src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen); in atmel_aes_gcm_data()
1634 dst = ((req->src == req->dst) ? src : in atmel_aes_gcm_data()
1635 scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen)); in atmel_aes_gcm_data()
1638 /* Update the Mode Register for DMA transfers. */ in atmel_aes_gcm_data()
1642 if (dd->caps.has_dualbuff) in atmel_aes_gcm_data()
1646 return atmel_aes_dma_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1650 return atmel_aes_cpu_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1656 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag_init()
1657 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_tag_init()
1658 __be64 *data = dd->buf; in atmel_aes_gcm_tag_init()
1660 if (likely(dd->flags & AES_FLAGS_GTAGEN)) { in atmel_aes_gcm_tag_init()
1662 dd->resume = atmel_aes_gcm_tag_init; in atmel_aes_gcm_tag_init()
1664 return -EINPROGRESS; in atmel_aes_gcm_tag_init()
1671 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash); in atmel_aes_gcm_tag_init()
1673 data[0] = cpu_to_be64(req->assoclen * 8); in atmel_aes_gcm_tag_init()
1674 data[1] = cpu_to_be64(ctx->textlen * 8); in atmel_aes_gcm_tag_init()
1677 ctx->ghash, ctx->ghash, atmel_aes_gcm_tag); in atmel_aes_gcm_tag_init()
1682 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag()
1689 flags = dd->flags; in atmel_aes_gcm_tag()
1690 dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN); in atmel_aes_gcm_tag()
1691 dd->flags |= AES_FLAGS_CTR; in atmel_aes_gcm_tag()
1692 atmel_aes_write_ctrl(dd, false, ctx->j0); in atmel_aes_gcm_tag()
1693 dd->flags = flags; in atmel_aes_gcm_tag()
1695 atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash); in atmel_aes_gcm_tag()
1701 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_finalize()
1702 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_finalize()
1705 u32 offset, authsize, itag[4], *otag = ctx->tag; in atmel_aes_gcm_finalize()
1709 if (likely(dd->flags & AES_FLAGS_GTAGEN)) in atmel_aes_gcm_finalize()
1710 atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag); in atmel_aes_gcm_finalize()
1712 atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag); in atmel_aes_gcm_finalize()
1714 offset = req->assoclen + ctx->textlen; in atmel_aes_gcm_finalize()
1717 scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1); in atmel_aes_gcm_finalize()
1720 scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0); in atmel_aes_gcm_finalize()
1721 err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0; in atmel_aes_gcm_finalize()
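The decrypt path in atmel_aes_gcm_finalize() compares the computed tag against the one carried at the end of req->src using crypto_memneq(), the constant-time comparison helper, and reports -EBADMSG on mismatch. A minimal sketch of that verification step (generic names; in recent kernels crypto_memneq() lives in <crypto/utils.h>, older trees declare it via <crypto/algapi.h>):

#include <crypto/utils.h>		/* crypto_memneq() */
#include <crypto/scatterwalk.h>		/* scatterwalk_map_and_copy() */
#include <linux/errno.h>

/* Pull the transmitted tag out of the source scatterlist and compare it
 * against the computed one in constant time. */
static int example_check_tag(struct scatterlist *src, unsigned int offset,
			     const u8 *computed, unsigned int authsize)
{
	u8 expected[16];

	if (authsize > sizeof(expected))
		return -EINVAL;
	scatterwalk_map_and_copy(expected, src, offset, authsize, 0);
	return crypto_memneq(expected, computed, authsize) ? -EBADMSG : 0;
}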
1734 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_gcm_crypt()
1735 ctx->is_aead = true; in atmel_aes_gcm_crypt()
1738 rctx->mode = AES_FLAGS_GCM | mode; in atmel_aes_gcm_crypt()
1740 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_gcm_crypt()
1751 return -EINVAL; in atmel_aes_gcm_setkey()
1753 memcpy(ctx->key, key, keylen); in atmel_aes_gcm_setkey()
1754 ctx->keylen = keylen; in atmel_aes_gcm_setkey()
1780 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_gcm_init()
1782 return -ENODEV; in atmel_aes_gcm_init()
1785 ctx->base.dd = dd; in atmel_aes_gcm_init()
1786 ctx->base.dd->ctx = &ctx->base; in atmel_aes_gcm_init()
1787 ctx->base.start = atmel_aes_gcm_start; in atmel_aes_gcm_init()
1803 .cra_driver_name = "atmel-gcm-aes",
1822 struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx); in atmel_aes_xts_start()
1823 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_xts_start()
1834 /* Compute the tweak value from req->iv with ecb(aes). */ in atmel_aes_xts_start()
1835 flags = dd->flags; in atmel_aes_xts_start()
1836 dd->flags &= ~AES_FLAGS_MODE_MASK; in atmel_aes_xts_start()
1837 dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT); in atmel_aes_xts_start()
1839 ctx->key2, ctx->base.keylen); in atmel_aes_xts_start()
1840 dd->flags = flags; in atmel_aes_xts_start()
1842 atmel_aes_write_block(dd, AES_IDATAR(0), req->iv); in atmel_aes_xts_start()
1848 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_xts_process_data()
1849 bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD); in atmel_aes_xts_process_data()
1863 swap(tweak_bytes[i], tweak_bytes[AES_BLOCK_SIZE - 1 - i]); in atmel_aes_xts_process_data()
1870 return atmel_aes_dma_start(dd, req->src, req->dst, in atmel_aes_xts_process_data()
1871 req->cryptlen, in atmel_aes_xts_process_data()
1874 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen, in atmel_aes_xts_process_data()
1888 crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK); in atmel_aes_xts_setkey()
1889 crypto_skcipher_set_flags(ctx->fallback_tfm, tfm->base.crt_flags & in atmel_aes_xts_setkey()
1891 err = crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen); in atmel_aes_xts_setkey()
1895 memcpy(ctx->base.key, key, keylen/2); in atmel_aes_xts_setkey()
1896 memcpy(ctx->key2, key + keylen/2, keylen/2); in atmel_aes_xts_setkey()
1897 ctx->base.keylen = keylen/2; in atmel_aes_xts_setkey()
1916 const char *tfm_name = crypto_tfm_alg_name(&tfm->base); in atmel_aes_xts_init_tfm()
1918 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_xts_init_tfm()
1920 return -ENODEV; in atmel_aes_xts_init_tfm()
1922 ctx->fallback_tfm = crypto_alloc_skcipher(tfm_name, 0, in atmel_aes_xts_init_tfm()
1924 if (IS_ERR(ctx->fallback_tfm)) in atmel_aes_xts_init_tfm()
1925 return PTR_ERR(ctx->fallback_tfm); in atmel_aes_xts_init_tfm()
1928 crypto_skcipher_reqsize(ctx->fallback_tfm)); in atmel_aes_xts_init_tfm()
1929 ctx->base.dd = dd; in atmel_aes_xts_init_tfm()
1930 ctx->base.dd->ctx = &ctx->base; in atmel_aes_xts_init_tfm()
1931 ctx->base.start = atmel_aes_xts_start; in atmel_aes_xts_init_tfm()
1940 crypto_free_skcipher(ctx->fallback_tfm); in atmel_aes_xts_exit_tfm()
1945 .base.cra_driver_name = "atmel-xts-aes",
1974 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_complete()
1977 if (err && (dd->flags & AES_FLAGS_OWN_SHA)) in atmel_aes_authenc_complete()
1978 atmel_sha_authenc_abort(&rctx->auth_req); in atmel_aes_authenc_complete()
1979 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_complete()
1984 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_start()
1990 atmel_aes_set_mode(dd, &rctx->base); in atmel_aes_authenc_start()
1996 return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth, in atmel_aes_authenc_start()
2003 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_init()
2007 dd->is_async = true; in atmel_aes_authenc_init()
2012 dd->flags |= AES_FLAGS_OWN_SHA; in atmel_aes_authenc_init()
2015 return atmel_sha_authenc_init(&rctx->auth_req, in atmel_aes_authenc_init()
2016 req->src, req->assoclen, in atmel_aes_authenc_init()
2017 rctx->textlen, in atmel_aes_authenc_init()
2024 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_transfer()
2032 dd->is_async = true; in atmel_aes_authenc_transfer()
2036 /* Prepare src and dst scatter-lists to transfer cipher/plain texts. */ in atmel_aes_authenc_transfer()
2037 src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen); in atmel_aes_authenc_transfer()
2040 if (req->src != req->dst) in atmel_aes_authenc_transfer()
2041 dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen); in atmel_aes_authenc_transfer()
2044 memcpy(iv, req->iv, sizeof(iv)); in atmel_aes_authenc_transfer()
2049 * not by the DMA) because we must force the AES_MR_SMOD bitfield to the in atmel_aes_authenc_transfer()
2060 return atmel_aes_dma_start(dd, src, dst, rctx->textlen, in atmel_aes_authenc_transfer()
2066 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_digest()
2070 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_digest()
2071 return atmel_sha_authenc_final(&rctx->auth_req, in atmel_aes_authenc_digest()
2072 rctx->digest, sizeof(rctx->digest), in atmel_aes_authenc_digest()
2079 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_final()
2083 u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest; in atmel_aes_authenc_final()
2087 dd->is_async = true; in atmel_aes_authenc_final()
2091 offs = req->assoclen + rctx->textlen; in atmel_aes_authenc_final()
2094 scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1); in atmel_aes_authenc_final()
2096 scatterwalk_map_and_copy(idigest, req->src, offs, authsize, 0); in atmel_aes_authenc_final()
2098 err = -EBADMSG; in atmel_aes_authenc_final()
2115 if (keys.enckeylen > sizeof(ctx->base.key)) in atmel_aes_authenc_setkey()
2119 err = atmel_sha_authenc_setkey(ctx->auth, in atmel_aes_authenc_setkey()
2128 ctx->base.keylen = keys.enckeylen; in atmel_aes_authenc_setkey()
2129 memcpy(ctx->base.key, keys.enckey, keys.enckeylen); in atmel_aes_authenc_setkey()
2136 return -EINVAL; in atmel_aes_authenc_setkey()
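atmel_aes_authenc_setkey() above splits the combined authenc() key blob with the standard helper before checking that the cipher half fits in ctx->base.key. A short sketch of that split, keeping only the fields visible in these hits (enckey/enckeylen for the AES half; authkey/authkeylen would go to the HMAC transform):

#include <crypto/authenc.h>
#include <linux/string.h>
#include <linux/errno.h>

static int example_split_authenc_key(const u8 *key, unsigned int keylen,
				     u8 *aes_key, unsigned int aes_key_max,
				     unsigned int *aes_keylen)
{
	struct crypto_authenc_keys keys;
	int err = -EINVAL;

	if (crypto_authenc_extractkeys(&keys, key, keylen))
		goto out;			/* malformed key blob */
	if (keys.enckeylen > aes_key_max)
		goto out;			/* cipher key too large for our storage */

	memcpy(aes_key, keys.enckey, keys.enckeylen);
	*aes_keylen = keys.enckeylen;
	/* keys.authkey / keys.authkeylen would be passed to the hash side. */
	err = 0;
out:
	memzero_explicit(&keys, sizeof(keys));	/* don't leave key pointers/lengths behind */
	return err;
}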
2146 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_authenc_init_tfm()
2148 return -ENODEV; in atmel_aes_authenc_init_tfm()
2150 ctx->auth = atmel_sha_authenc_spawn(auth_mode); in atmel_aes_authenc_init_tfm()
2151 if (IS_ERR(ctx->auth)) in atmel_aes_authenc_init_tfm()
2152 return PTR_ERR(ctx->auth); in atmel_aes_authenc_init_tfm()
2156 ctx->base.dd = dd; in atmel_aes_authenc_init_tfm()
2157 ctx->base.dd->ctx = &ctx->base; in atmel_aes_authenc_init_tfm()
2158 ctx->base.start = atmel_aes_authenc_start; in atmel_aes_authenc_init_tfm()
2192 atmel_sha_authenc_free(ctx->auth); in atmel_aes_authenc_exit_tfm()
2205 if (!enc && req->cryptlen < authsize) in atmel_aes_authenc_crypt()
2206 return -EINVAL; in atmel_aes_authenc_crypt()
2207 rctx->textlen = req->cryptlen - (enc ? 0 : authsize); in atmel_aes_authenc_crypt()
2211 * the SHA auto-padding can be used only on non-empty messages. in atmel_aes_authenc_crypt()
2214 if (!rctx->textlen && !req->assoclen) in atmel_aes_authenc_crypt()
2215 return -EINVAL; in atmel_aes_authenc_crypt()
2217 rctx->base.mode = mode; in atmel_aes_authenc_crypt()
2218 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_authenc_crypt()
2219 ctx->is_aead = true; in atmel_aes_authenc_crypt()
2221 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_authenc_crypt()
2246 .cra_driver_name = "atmel-authenc-hmac-sha1-cbc-aes",
2262 .cra_driver_name = "atmel-authenc-hmac-sha224-cbc-aes",
2278 .cra_driver_name = "atmel-authenc-hmac-sha256-cbc-aes",
2294 .cra_driver_name = "atmel-authenc-hmac-sha384-cbc-aes",
2310 .cra_driver_name = "atmel-authenc-hmac-sha512-cbc-aes",
2322 dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER); in atmel_aes_buff_init()
2323 dd->buflen = ATMEL_AES_BUFFER_SIZE; in atmel_aes_buff_init()
2324 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
2326 if (!dd->buf) { in atmel_aes_buff_init()
2327 dev_err(dd->dev, "unable to alloc pages.\n"); in atmel_aes_buff_init()
2328 return -ENOMEM; in atmel_aes_buff_init()
2336 free_page((unsigned long)dd->buf); in atmel_aes_buff_cleanup()
2343 /* Try to grab 2 DMA channels */ in atmel_aes_dma_init()
2344 dd->src.chan = dma_request_chan(dd->dev, "tx"); in atmel_aes_dma_init()
2345 if (IS_ERR(dd->src.chan)) { in atmel_aes_dma_init()
2346 ret = PTR_ERR(dd->src.chan); in atmel_aes_dma_init()
2350 dd->dst.chan = dma_request_chan(dd->dev, "rx"); in atmel_aes_dma_init()
2351 if (IS_ERR(dd->dst.chan)) { in atmel_aes_dma_init()
2352 ret = PTR_ERR(dd->dst.chan); in atmel_aes_dma_init()
2359 dma_release_channel(dd->src.chan); in atmel_aes_dma_init()
2361 dev_err(dd->dev, "no DMA channel available\n"); in atmel_aes_dma_init()
2367 dma_release_channel(dd->dst.chan); in atmel_aes_dma_cleanup()
2368 dma_release_channel(dd->src.chan); in atmel_aes_dma_cleanup()
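atmel_aes_dma_init()/atmel_aes_dma_cleanup() above request the "tx" and "rx" channels named in the device tree and release them on error or teardown; dma_request_chan() returns an ERR_PTR (possibly -EPROBE_DEFER) rather than NULL on failure, so the error is simply propagated. The same acquire/release pattern in isolation (generic names):

#include <linux/dmaengine.h>
#include <linux/err.h>

static int example_get_channels(struct device *dev,
				struct dma_chan **tx, struct dma_chan **rx)
{
	*tx = dma_request_chan(dev, "tx");
	if (IS_ERR(*tx))
		return PTR_ERR(*tx);		/* may be -EPROBE_DEFER: just pass it up */

	*rx = dma_request_chan(dev, "rx");
	if (IS_ERR(*rx)) {
		dma_release_channel(*tx);	/* undo the first request */
		return PTR_ERR(*rx);
	}
	return 0;
}

static void example_put_channels(struct dma_chan *tx, struct dma_chan *rx)
{
	dma_release_channel(rx);
	dma_release_channel(tx);
}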
2382 dd->is_async = true; in atmel_aes_done_task()
2383 (void)dd->resume(dd); in atmel_aes_done_task()
2394 if (AES_FLAGS_BUSY & aes_dd->flags) in atmel_aes_irq()
2395 tasklet_schedule(&aes_dd->done_task); in atmel_aes_irq()
2397 dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n"); in atmel_aes_irq()
2409 if (dd->caps.has_authenc) in atmel_aes_unregister_algs()
2414 if (dd->caps.has_xts) in atmel_aes_unregister_algs()
2417 if (dd->caps.has_gcm) in atmel_aes_unregister_algs()
2420 if (dd->caps.has_cfb64) in atmel_aes_unregister_algs()
2429 alg->cra_flags |= CRYPTO_ALG_ASYNC; in atmel_aes_crypto_alg_init()
2430 alg->cra_alignmask = 0xf; in atmel_aes_crypto_alg_init()
2431 alg->cra_priority = ATMEL_AES_PRIORITY; in atmel_aes_crypto_alg_init()
2432 alg->cra_module = THIS_MODULE; in atmel_aes_crypto_alg_init()
2447 if (dd->caps.has_cfb64) { in atmel_aes_register_algs()
2455 if (dd->caps.has_gcm) { in atmel_aes_register_algs()
2463 if (dd->caps.has_xts) { in atmel_aes_register_algs()
2472 if (dd->caps.has_authenc) { in atmel_aes_register_algs()
2507 dd->caps.has_dualbuff = 0; in atmel_aes_get_cap()
2508 dd->caps.has_cfb64 = 0; in atmel_aes_get_cap()
2509 dd->caps.has_gcm = 0; in atmel_aes_get_cap()
2510 dd->caps.has_xts = 0; in atmel_aes_get_cap()
2511 dd->caps.has_authenc = 0; in atmel_aes_get_cap()
2512 dd->caps.max_burst_size = 1; in atmel_aes_get_cap()
2515 switch (dd->hw_version & 0xff0) { in atmel_aes_get_cap()
2517 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2518 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2519 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2520 dd->caps.has_xts = 1; in atmel_aes_get_cap()
2521 dd->caps.has_authenc = 1; in atmel_aes_get_cap()
2522 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2525 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2526 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2527 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2528 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2531 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2532 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2533 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2538 dev_warn(dd->dev, in atmel_aes_get_cap()
2546 { .compatible = "atmel,at91sam9g46-aes" },
2555 struct device *dev = &pdev->dev; in atmel_aes_probe()
2559 aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL); in atmel_aes_probe()
2561 return -ENOMEM; in atmel_aes_probe()
2563 aes_dd->dev = dev; in atmel_aes_probe()
2567 INIT_LIST_HEAD(&aes_dd->list); in atmel_aes_probe()
2568 spin_lock_init(&aes_dd->lock); in atmel_aes_probe()
2570 tasklet_init(&aes_dd->done_task, atmel_aes_done_task, in atmel_aes_probe()
2572 tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task, in atmel_aes_probe()
2575 crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH); in atmel_aes_probe()
2581 err = -ENODEV; in atmel_aes_probe()
2584 aes_dd->phys_base = aes_res->start; in atmel_aes_probe()
2587 aes_dd->irq = platform_get_irq(pdev, 0); in atmel_aes_probe()
2588 if (aes_dd->irq < 0) { in atmel_aes_probe()
2589 err = aes_dd->irq; in atmel_aes_probe()
2593 err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq, in atmel_aes_probe()
2594 IRQF_SHARED, "atmel-aes", aes_dd); in atmel_aes_probe()
2601 aes_dd->iclk = devm_clk_get(&pdev->dev, "aes_clk"); in atmel_aes_probe()
2602 if (IS_ERR(aes_dd->iclk)) { in atmel_aes_probe()
2604 err = PTR_ERR(aes_dd->iclk); in atmel_aes_probe()
2608 aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res); in atmel_aes_probe()
2609 if (IS_ERR(aes_dd->io_base)) { in atmel_aes_probe()
2611 err = PTR_ERR(aes_dd->io_base); in atmel_aes_probe()
2615 err = clk_prepare(aes_dd->iclk); in atmel_aes_probe()
2626 if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) { in atmel_aes_probe()
2627 err = -EPROBE_DEFER; in atmel_aes_probe()
2641 list_add_tail(&aes_dd->list, &atmel_aes.dev_list); in atmel_aes_probe()
2648 dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n", in atmel_aes_probe()
2649 dma_chan_name(aes_dd->src.chan), in atmel_aes_probe()
2650 dma_chan_name(aes_dd->dst.chan)); in atmel_aes_probe()
2656 list_del(&aes_dd->list); in atmel_aes_probe()
2662 clk_unprepare(aes_dd->iclk); in atmel_aes_probe()
2664 tasklet_kill(&aes_dd->done_task); in atmel_aes_probe()
2665 tasklet_kill(&aes_dd->queue_task); in atmel_aes_probe()
2676 return -ENODEV; in atmel_aes_remove()
2678 list_del(&aes_dd->list); in atmel_aes_remove()
2683 tasklet_kill(&aes_dd->done_task); in atmel_aes_remove()
2684 tasklet_kill(&aes_dd->queue_task); in atmel_aes_remove()
2689 clk_unprepare(aes_dd->iclk); in atmel_aes_remove()
2707 MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");