Lines matching "imx27-sahara" (search query: +full:imx27 +full:- +full:sahara) in drivers/crypto/sahara.c. Excerpts below are grouped by enclosing function; elided source lines are marked with /* ... */.

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Support for SAHARA cryptographic accelerator.
 * ...
 * Author: Javier Martin <javier.martin@vista-silicon.com>
 *
 * Based on omap-aes.c and tegra-aes.c
 */

/* ... */
#include <linux/dma-mapping.h>
/* ... */
#define SAHARA_NAME "sahara"

/* ... */

/* SAHARA can only process one request at a time */

/* ... */

/* AES-specific context */

/* ... */
/*
 * struct sahara_sha_reqctx - private data per request
 * ...
 * @rembuf: used to prepare one block_size-aligned request
 * @context: hw-specific context for the request; the digest is
 *           extracted from this context
 * @mode: specifies what type of hw descriptor needs to be built
 * ...
 * @context_size: length of the hw context for this request
 */
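/*
 * A sketch of the structure the kernel-doc above describes, reconstructed
 * from the fields this excerpt actually touches. Field order and the array
 * sizes are assumptions (the driver's real block-size constant is not shown
 * in this excerpt), not the verbatim definition:
 */
struct sahara_sha_reqctx {
	u8			buf[SHA256_BLOCK_SIZE];	/* staging for sub-block updates (size assumed) */
	u8			rembuf[SHA256_BLOCK_SIZE]; /* carried-over bytes, replayed first (size assumed) */
	u8			context[SHA256_DIGEST_SIZE + 4]; /* hw context; digest sits at the front */
	unsigned int		mode;		/* MDHA header bits for this request */
	unsigned int		digest_size;
	unsigned int		context_size;	/* digest_size + 4, see sahara_sha_init() */
	unsigned int		buf_cnt;	/* valid bytes currently staged in buf */
	unsigned int		sg_in_idx;	/* first hw link used for input, 0 = none */
	struct scatterlist	*in_sg;		/* what the engine will actually read */
	struct scatterlist	in_sg_chain[2];	/* rembuf chained in front of req->src */
	size_t			total;		/* bytes to process this round */
	unsigned int		first;		/* no hw context exists yet */
	unsigned int		last;		/* final update: hw may pad */
	unsigned int		active;
};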
/* in sahara_write(): */
	writel(data, dev->regs_base + reg);

/* in sahara_read(): */
	return readl(dev->regs_base + reg);
/* in sahara_aes_key_hdr(): */
	if (dev->flags & FLAGS_CBC) {
		/* ... */
	}

	if (dev->flags & FLAGS_ENCRYPT) {
		/* ... */
	}
/* entries from the error string tables consumed by sahara_decode_error(): */
	"Half-word transfer",
	/* ... */
	"Input buffer non-empty",
/* in sahara_decode_error(): */
	dev_err(dev->device, "%s: Error Register = 0x%08x\n", __func__, error);

	dev_err(dev->device, " - %s.\n", sahara_err_src[source]);

	/* ... for DMA-sourced errors, direction and transfer details: */
	dev_err(dev->device, " * DMA read.\n");
	/* ... or: */
	dev_err(dev->device, " * DMA write.\n");
	dev_err(dev->device, " * %s.\n",
		/* ... DMA transfer-size string, e.g. "Half-word transfer" ... */);
	dev_err(dev->device, " * %s.\n",
		/* ... DMA source string ... */);
	/* ... for MDHA/SKHA-sourced errors, the per-channel string: */
	dev_err(dev->device, " * %s.\n",
		/* ... */);
	dev_err(dev->device, " * %s.\n",
		/* ... */);

	dev_err(dev->device, "\n");
/* in sahara_decode_status(): */
	dev_dbg(dev->device, "%s: Status Register = 0x%08x\n",
		/* ... */);

	dev_dbg(dev->device, " - State = %d:\n", state);
	/* ... */
	dev_dbg(dev->device, " * Descriptor completed. IRQ pending.\n");
	/* ... */
	dev_dbg(dev->device, " * %s.\n",
		/* ... */);

	/* each of the following is printed only when its status bit is set: */
	dev_dbg(dev->device, " - DAR Full.\n");
	dev_dbg(dev->device, " - Error.\n");
	dev_dbg(dev->device, " - Secure.\n");
	dev_dbg(dev->device, " - Fail.\n");
	dev_dbg(dev->device, " - RNG Reseed Request.\n");
	dev_dbg(dev->device, " - RNG Active.\n");
	dev_dbg(dev->device, " - MDHA Active.\n");
	dev_dbg(dev->device, " - SKHA Active.\n");
	dev_dbg(dev->device, " - Batch Mode.\n");
	dev_dbg(dev->device, " - Dedicated Mode.\n");
	dev_dbg(dev->device, " - Debug Mode.\n");

	dev_dbg(dev->device, " - Internal state = 0x%02x\n",
		/* ... */);

	dev_dbg(dev->device, "Current DAR: 0x%08x\n",
		/* ... */);
	dev_dbg(dev->device, "Initial DAR: 0x%08x\n\n",
		/* ... */);
/* in sahara_dump_descriptors(): */
	for (i = 0; i < SAHARA_MAX_HW_DESC; i++) {
		dev_dbg(dev->device, "Descriptor (%d) (%pad):\n",
			i, &dev->hw_phys_desc[i]);
		dev_dbg(dev->device, "\thdr = 0x%08x\n", dev->hw_desc[i]->hdr);
		dev_dbg(dev->device, "\tlen1 = %u\n", dev->hw_desc[i]->len1);
		dev_dbg(dev->device, "\tp1 = 0x%08x\n", dev->hw_desc[i]->p1);
		dev_dbg(dev->device, "\tlen2 = %u\n", dev->hw_desc[i]->len2);
		dev_dbg(dev->device, "\tp2 = 0x%08x\n", dev->hw_desc[i]->p2);
		dev_dbg(dev->device, "\tnext = 0x%08x\n",
			dev->hw_desc[i]->next);
	}

	dev_dbg(dev->device, "\n");
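/*
 * Reconstruction of the hardware descriptor implied by the fields dumped
 * above (the 0x%08x / %u formats suggest 32-bit fields; naming follows the
 * dump, the driver's actual definition may differ):
 */
struct sahara_hw_desc {
	u32	hdr;	/* operation header, e.g. from sahara_aes_key_hdr() */
	u32	len1;	/* byte length of the buffer/chain behind p1 */
	u32	p1;	/* physical address: IV or input link chain */
	u32	len2;	/* byte length of the buffer/chain behind p2 */
	u32	p2;	/* physical address: key or output link chain */
	u32	next;	/* physical address of the next descriptor, 0 = last */
};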
/* in sahara_dump_links(): */
	for (i = 0; i < SAHARA_MAX_HW_LINK; i++) {
		dev_dbg(dev->device, "Link (%d) (%pad):\n",
			i, &dev->hw_phys_link[i]);
		dev_dbg(dev->device, "\tlen = %u\n", dev->hw_link[i]->len);
		dev_dbg(dev->device, "\tp = 0x%08x\n", dev->hw_link[i]->p);
		dev_dbg(dev->device, "\tnext = 0x%08x\n",
			dev->hw_link[i]->next);
	}

	dev_dbg(dev->device, "\n");
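/*
 * Likewise for the link entries: each one describes a single scatterlist
 * segment (again a reconstruction; field widths assumed from the formats):
 */
struct sahara_hw_link {
	u32	len;	/* segment length in bytes */
	u32	p;	/* segment physical (DMA) address */
	u32	next;	/* physical address of the next link, 0 = last */
};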
/* in sahara_hw_descriptor_create(): */
	struct sahara_ctx *ctx = dev->ctx;
	/* ... */

	/* Copy the key into the engine's DMA buffer when it has changed */
	if (ctx->flags & FLAGS_NEW_KEY) {
		memcpy(dev->key_base, ctx->key, ctx->keylen);
		ctx->flags &= ~FLAGS_NEW_KEY;

		if (dev->flags & FLAGS_CBC) {
			dev->hw_desc[idx]->len1 = AES_BLOCK_SIZE;
			dev->hw_desc[idx]->p1 = dev->iv_phys_base;
		} else {
			dev->hw_desc[idx]->len1 = 0;
			dev->hw_desc[idx]->p1 = 0;
		}
		dev->hw_desc[idx]->len2 = ctx->keylen;
		dev->hw_desc[idx]->p2 = dev->key_phys_base;
		dev->hw_desc[idx]->next = dev->hw_phys_desc[1];

		dev->hw_desc[idx]->hdr = sahara_aes_key_hdr(dev);

		idx++;
	}

	dev->nb_in_sg = sg_nents_for_len(dev->in_sg, dev->total);
	if (dev->nb_in_sg < 0) {
		dev_err(dev->device, "Invalid number of src SG.\n");
		return dev->nb_in_sg;
	}
	dev->nb_out_sg = sg_nents_for_len(dev->out_sg, dev->total);
	if (dev->nb_out_sg < 0) {
		dev_err(dev->device, "Invalid number of dst SG.\n");
		return dev->nb_out_sg;
	}
	if ((dev->nb_in_sg + dev->nb_out_sg) > SAHARA_MAX_HW_LINK) {
		dev_err(dev->device, "not enough hw links (%d)\n",
			dev->nb_in_sg + dev->nb_out_sg);
		return -EINVAL;
	}

	ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg,
			 DMA_TO_DEVICE);
	if (ret != dev->nb_in_sg) {
		dev_err(dev->device, "couldn't map in sg\n");
		/* ... */
	}
	ret = dma_map_sg(dev->device, dev->out_sg, dev->nb_out_sg,
			 DMA_FROM_DEVICE);
	if (ret != dev->nb_out_sg) {
		dev_err(dev->device, "couldn't map out sg\n");
		/* ... */
	}

	/* Create input links */
	dev->hw_desc[idx]->p1 = dev->hw_phys_link[0];
	sg = dev->in_sg;
	for (i = 0; i < dev->nb_in_sg; i++) {
		dev->hw_link[i]->len = sg->length;
		dev->hw_link[i]->p = sg->dma_address;
		if (i == (dev->nb_in_sg - 1)) {
			dev->hw_link[i]->next = 0;
		} else {
			dev->hw_link[i]->next = dev->hw_phys_link[i + 1];
			sg = sg_next(sg);
		}
	}

	/* Create output links */
	dev->hw_desc[idx]->p2 = dev->hw_phys_link[i];
	sg = dev->out_sg;
	for (j = i; j < dev->nb_out_sg + i; j++) {
		dev->hw_link[j]->len = sg->length;
		dev->hw_link[j]->p = sg->dma_address;
		if (j == (dev->nb_out_sg + i - 1)) {
			dev->hw_link[j]->next = 0;
		} else {
			dev->hw_link[j]->next = dev->hw_phys_link[j + 1];
			sg = sg_next(sg);
		}
	}

	/* Fill remaining fields of hw_desc[1] */
	dev->hw_desc[idx]->hdr = sahara_aes_data_link_hdr(dev);
	dev->hw_desc[idx]->len1 = dev->total;
	dev->hw_desc[idx]->len2 = dev->total;
	dev->hw_desc[idx]->next = 0;
	/* ... */

	sahara_write(dev, dev->hw_phys_desc[0], SAHARA_REG_DAR);
	/* ... */

	/* error unwind (labels elided in this excerpt): */
	dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg,
		     DMA_FROM_DEVICE);
	/* ... */
	dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
		     DMA_TO_DEVICE);

	return -EINVAL;
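/*
 * Shape of the chain sahara_hw_descriptor_create() builds (a summary of the
 * code above, not additional driver logic):
 *
 *   hw_desc[0]: key descriptor, only rebuilt on FLAGS_NEW_KEY
 *       p1/len1 -> IV (CBC only)        p2/len2 -> key
 *       next    -> hw_desc[1]
 *   hw_desc[1]: data descriptor
 *       p1 -> hw_link[0..nb_in_sg-1]       (input, DMA_TO_DEVICE)
 *       p2 -> hw_link[i..i+nb_out_sg-1]    (output, DMA_FROM_DEVICE)
 *       next = 0 (end of chain)
 *
 * Writing hw_phys_desc[0] to SAHARA_REG_DAR starts the engine; completion
 * arrives through sahara_irq_handler() further down.
 */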
/* in sahara_aes_process(): */
	dev_dbg(dev->device,
		/* ... */
		req->cryptlen, req->src, req->dst);

	/* assign new request to device */
	dev->total = req->cryptlen;
	dev->in_sg = req->src;
	dev->out_sg = req->dst;
	/* ... */
	rctx->mode &= FLAGS_MODE_MASK;
	dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;

	if ((dev->flags & FLAGS_CBC) && req->iv)
		/* the IV is one AES block, not a key: AES_BLOCK_SIZE (same
		 * value as AES_KEYSIZE_128, but it states the intent) */
		memcpy(dev->iv_base, req->iv, AES_BLOCK_SIZE);

	/* assign new context to device */
	dev->ctx = ctx;

	reinit_completion(&dev->dma_completion);

	ret = sahara_hw_descriptor_create(dev);
	if (ret)
		return -EINVAL;

	timeout = wait_for_completion_timeout(&dev->dma_completion,
			msecs_to_jiffies(SAHARA_TIMEOUT_MS));
	if (!timeout) {
		dev_err(dev->device, "AES timeout\n");
		return -ETIMEDOUT;
	}

	dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg,
		     DMA_FROM_DEVICE);
	dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
		     DMA_TO_DEVICE);
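/*
 * Processing is fully synchronous from the caller's point of view: the
 * request is programmed, the engine is started, and the thread sleeps on
 * dma_completion until the IRQ handler completes it or SAHARA_TIMEOUT_MS
 * expires, after which the scatterlists are unmapped.
 */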
/* in sahara_aes_setkey(): */
	ctx->keylen = keylen;

	/* SAHARA only supports 128-bit keys */
	if (keylen == AES_KEYSIZE_128) {
		memcpy(ctx->key, key, keylen);
		ctx->flags |= FLAGS_NEW_KEY;
		return 0;
	}

	if (keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_256)
		return -EINVAL;

	/* The requested key size is not supported by HW, do a fallback. */
	crypto_skcipher_clear_flags(ctx->fallback, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback, tfm->base.crt_flags &
						 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(ctx->fallback, key, keylen);
/* in sahara_aes_crypt(): */
	dev_dbg(dev->device, "nbytes: %d, enc: %d, cbc: %d\n",
		req->cryptlen, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC));

	if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE)) {
		dev_err(dev->device,
			/* ... */
		return -EINVAL;
	}

	rctx->mode = mode;

	mutex_lock(&dev->queue_mutex);
	err = crypto_enqueue_request(&dev->queue, &req->base);
	mutex_unlock(&dev->queue_mutex);

	wake_up_process(dev->kthread);

	return err;
/* in sahara_aes_ecb_encrypt(): */
	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);
		return crypto_skcipher_encrypt(&rctx->fallback_req);
	}

	return sahara_aes_crypt(req, FLAGS_ENCRYPT);

/* in sahara_aes_ecb_decrypt(): */
	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);
		return crypto_skcipher_decrypt(&rctx->fallback_req);
	}

	return sahara_aes_crypt(req, 0);

/* in sahara_aes_cbc_encrypt(): */
	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);
		return crypto_skcipher_encrypt(&rctx->fallback_req);
	}

	return sahara_aes_crypt(req, FLAGS_ENCRYPT | FLAGS_CBC);

/* in sahara_aes_cbc_decrypt(): */
	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);
		return crypto_skcipher_decrypt(&rctx->fallback_req);
	}

	return sahara_aes_crypt(req, FLAGS_CBC);
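/*
 * All four entry points above share one shape: key sizes the engine cannot
 * handle (192/256-bit, stashed into ctx->fallback by sahara_aes_setkey())
 * are redirected to the software fallback skcipher, while 128-bit keys are
 * queued through sahara_aes_crypt() with the matching FLAGS_ENCRYPT /
 * FLAGS_CBC mode bits.
 */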
/* in sahara_aes_init_tfm(): */
	const char *name = crypto_tfm_alg_name(&tfm->base);
	/* ... */
	ctx->fallback = crypto_alloc_skcipher(name, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback)) {
		/* ... */
		return PTR_ERR(ctx->fallback);
	}

	/* reqsize must cover our own reqctx plus the fallback's */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct sahara_aes_reqctx) +
					 crypto_skcipher_reqsize(ctx->fallback));
/* in sahara_aes_exit_tfm(): */
	crypto_free_skcipher(ctx->fallback);
/* in sahara_sha_init_hdr(): */
	hdr = rctx->mode;

	if (rctx->first) {
		/* ... */
	}
	/* ... */
	if (rctx->last)
		/* ... */
/* in sahara_sha_hw_links_create(): */
	dev->in_sg = rctx->in_sg;

	dev->nb_in_sg = sg_nents_for_len(dev->in_sg, rctx->total);
	if (dev->nb_in_sg < 0) {
		dev_err(dev->device, "Invalid number of src SG.\n");
		return dev->nb_in_sg;
	}
	if (dev->nb_in_sg > SAHARA_MAX_HW_LINK) {
		dev_err(dev->device, "not enough hw links (%d)\n",
			dev->nb_in_sg);	/* was nb_in_sg + nb_out_sg, a stale copy/paste from the AES path */
		return -EINVAL;
	}

	sg = dev->in_sg;
	ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, DMA_TO_DEVICE);
	if (!ret)
		return -EFAULT;

	for (i = start; i < dev->nb_in_sg + start; i++) {
		dev->hw_link[i]->len = sg->length;
		dev->hw_link[i]->p = sg->dma_address;
		if (i == (dev->nb_in_sg + start - 1)) {
			dev->hw_link[i]->next = 0;
		} else {
			dev->hw_link[i]->next = dev->hw_phys_link[i + 1];
			sg = sg_next(sg);
		}
	}
/* in sahara_sha_hw_data_descriptor_create(): */
	if (rctx->first)
		/* ... initial descriptor: */
		dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx);
	else
		/* ... follow-up hash descriptor: */
		dev->hw_desc[index]->hdr = SAHARA_HDR_MDHA_HASH;

	dev->hw_desc[index]->len1 = rctx->total;
	if (dev->hw_desc[index]->len1 == 0) {
		/* if len1 is 0, p1 must be 0 too */
		dev->hw_desc[index]->p1 = 0;
		rctx->sg_in_idx = 0;
	} else {
		/* Create input links */
		dev->hw_desc[index]->p1 = dev->hw_phys_link[index];
		i = sahara_sha_hw_links_create(dev, rctx, index);

		rctx->sg_in_idx = index;
		if (i < 0)
			return i;
	}

	dev->hw_desc[index]->p2 = dev->hw_phys_link[i];

	/* write the hw context back through the final link */
	result_len = rctx->context_size;
	dev->hw_link[i]->p = dev->context_phys_base;

	dev->hw_link[i]->len = result_len;
	dev->hw_desc[index]->len2 = result_len;

	dev->hw_link[i]->next = 0;
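/*
 * Note: the data descriptor's p2 link points at context_phys_base, so on
 * completion the engine deposits its working context (which starts with the
 * digest) into dev->context_base; sahara_sha_process() then copies it back
 * into rctx->context.
 */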
/* in sahara_sha_hw_context_descriptor_create(): */
	dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx);

	dev->hw_desc[index]->len1 = rctx->context_size;
	dev->hw_desc[index]->p1 = dev->hw_phys_link[index];
	dev->hw_desc[index]->len2 = 0;
	dev->hw_desc[index]->p2 = 0;

	dev->hw_link[index]->len = rctx->context_size;
	dev->hw_link[index]->p = dev->context_phys_base;
	dev->hw_link[index]->next = 0;
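/*
 * This descriptor loads the previously saved hw context (its p1 link points
 * at context_phys_base) back into the MDHA unit, so a multi-update hash
 * resumes exactly where the last operation stopped; sahara_sha_process()
 * chains it ahead of the data descriptor for non-first requests.
 */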
/* in sahara_walk_and_recalc(): */
	if (!sg || !sg->length)
		/* ... */

	/* ... walk the list, trimming it to nbytes: */
		if (nbytes <= sg->length) {
			sg->length = nbytes;
			/* ... */
		}
		nbytes -= sg->length;
	/* ... */
/* in sahara_sha_prepare_request(): */
	/* append bytes from the previous operation */
	len = rctx->buf_cnt + req->nbytes;

	/* too little data: stash it and wait for more (unless this is final) */
	if (!rctx->last && (len < block_size)) {
		/* ... */
		scatterwalk_map_and_copy(rctx->buf + rctx->buf_cnt, req->src,
					 0, req->nbytes, 0);
		rctx->buf_cnt += req->nbytes;
		/* ... */
	}

	/* add data from the previous operation first */
	if (rctx->buf_cnt)
		memcpy(rctx->rembuf, rctx->buf, rctx->buf_cnt);

	/* data fed to the engine must be a multiple of block_size */
	hash_later = rctx->last ? 0 : len & (block_size - 1);
	if (hash_later) {
		unsigned int offset = req->nbytes - hash_later;
		/* save the trailing partial block for the next call */
		scatterwalk_map_and_copy(rctx->buf, req->src, offset,
					 hash_later, 0);
	}

	/* nbytes is now a multiple of block_size */
	req->nbytes = req->nbytes - hash_later;

	sahara_walk_and_recalc(req->src, req->nbytes);

	/* data from both the previous operation and the current one */
	if (rctx->buf_cnt && req->nbytes) {
		sg_init_table(rctx->in_sg_chain, 2);
		sg_set_buf(rctx->in_sg_chain, rctx->rembuf, rctx->buf_cnt);

		sg_chain(rctx->in_sg_chain, 2, req->src);

		rctx->total = req->nbytes + rctx->buf_cnt;
		rctx->in_sg = rctx->in_sg_chain;

		req->src = rctx->in_sg_chain;
	/* data from the previous operation only */
	} else if (rctx->buf_cnt) {
		if (req->src)
			rctx->in_sg = req->src;
		else
			rctx->in_sg = rctx->in_sg_chain;

		sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt);
		rctx->total = rctx->buf_cnt;
	/* no carried-over data */
	} else {
		rctx->in_sg = req->src;
		rctx->total = req->nbytes;
		req->src = rctx->in_sg;
	}

	/* only the remainder stays buffered for the next call */
	rctx->buf_cnt = hash_later;

	return -EINPROGRESS;	/* non-zero: signals the caller there is data to run */
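/*
 * Buffering contract of sahara_sha_prepare_request(), summarising the code
 * above: sub-block updates accumulate in rctx->buf; once a full block is
 * available (or rctx->last is set) the carried-over bytes are replayed via
 * rctx->rembuf, chained in front of req->src when both exist, and any
 * trailing partial block is saved back into rctx->buf. Only the final
 * transfer may be block-unaligned, since only it can be padded by the
 * hardware.
 */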
/* in sahara_sha_process(): */
	if (rctx->first) {
		/* ... build the data descriptor in slot 0 ... */
		dev->hw_desc[0]->next = 0;
		rctx->first = 0;
	} else {
		memcpy(dev->context_base, rctx->context, rctx->context_size);

		/* ... build the context-restore descriptor in slot 0 ... */
		dev->hw_desc[0]->next = dev->hw_phys_desc[1];
		/* ... and the data descriptor in slot 1 ... */
		dev->hw_desc[1]->next = 0;
	}
	/* ... */
	reinit_completion(&dev->dma_completion);

	sahara_write(dev, dev->hw_phys_desc[0], SAHARA_REG_DAR);

	timeout = wait_for_completion_timeout(&dev->dma_completion,
			msecs_to_jiffies(SAHARA_TIMEOUT_MS));
	if (!timeout) {
		dev_err(dev->device, "SHA timeout\n");
		return -ETIMEDOUT;
	}

	if (rctx->sg_in_idx)
		dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
			     DMA_TO_DEVICE);

	memcpy(rctx->context, dev->context_base, rctx->context_size);

	if (req->result)
		memcpy(req->result, rctx->context, rctx->digest_size);
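/*
 * The hw context is copied out after every operation and re-loaded before
 * the next one (the else-branch above), which is how a single engine can
 * serve many in-flight ahash transforms without keeping per-tfm hardware
 * state.
 */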
/* in sahara_queue_manage() (loop skeleton condensed from elided lines): */
	do {
		__set_current_state(TASK_INTERRUPTIBLE);

		mutex_lock(&dev->queue_mutex);
		backlog = crypto_get_backlog(&dev->queue);
		async_req = crypto_dequeue_request(&dev->queue);
		mutex_unlock(&dev->queue_mutex);

		if (backlog)
			backlog->complete(backlog, -EINPROGRESS);

		if (async_req) {
			if (crypto_tfm_alg_type(async_req->tfm) ==
			    CRYPTO_ALG_TYPE_AHASH)
				ret = sahara_sha_process(ahash_request_cast(async_req));
			else
				ret = sahara_aes_process(skcipher_request_cast(async_req));

			async_req->complete(async_req, ret);
			/* ... */
		}
		/* ... sleep until new work arrives ... */
	} while (!kthread_should_stop());
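/*
 * Dispatch model: the AES and SHA entry points only enqueue requests and
 * wake this kthread, which dequeues and runs them strictly one at a time,
 * enforcing the "one request at a time" limitation noted at the top of the
 * file.
 */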
/* in sahara_sha_enqueue(): */
	if (!req->nbytes && !last)
		return 0;

	rctx->last = last;

	if (!rctx->active) {
		rctx->active = 1;
		rctx->first = 1;
	}

	mutex_lock(&dev->queue_mutex);
	ret = crypto_enqueue_request(&dev->queue, &req->base);
	mutex_unlock(&dev->queue_mutex);

	wake_up_process(dev->kthread);

	return ret;
/* in sahara_sha_init(): */
	switch (crypto_ahash_digestsize(tfm)) {
	case SHA1_DIGEST_SIZE:
		rctx->mode |= SAHARA_HDR_MDHA_ALG_SHA1;
		rctx->digest_size = SHA1_DIGEST_SIZE;
		break;
	case SHA256_DIGEST_SIZE:
		rctx->mode |= SAHARA_HDR_MDHA_ALG_SHA256;
		rctx->digest_size = SHA256_DIGEST_SIZE;
		break;
	default:
		return -EINVAL;
	}

	rctx->context_size = rctx->digest_size + 4;
	rctx->active = 0;
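/*
 * The "+ 4" leaves room for an extra hw status/length word alongside the
 * digest in the exported MDHA context; this is an inference from the
 * context handling above, the excerpt itself does not spell out the layout.
 */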
/* in sahara_sha_final(): */
	req->nbytes = 0;
	return sahara_sha_enqueue(req, 1);
/* from the skcipher and ahash algorithm definitions: */
	.base.cra_driver_name	= "sahara-ecb-aes",
	/* ... */
	.base.cra_driver_name	= "sahara-cbc-aes",
	/* ... */
	.cra_driver_name	= "sahara-sha1",
	/* ... */
	.cra_driver_name	= "sahara-sha256",
/* in sahara_irq_handler(): */
	/* ... on a "descriptor completed" status: */
	dev->error = 0;
	/* ... on an error status (printed via sahara_decode_error()): */
	dev->error = -EINVAL;
	/* ... */
	complete(&dev->dma_completion);
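/*
 * The IRQ handler's job is deliberately small: record success or failure in
 * dev->error and fire dma_completion, which wakes the thread blocked in
 * sahara_aes_process()/sahara_sha_process() above.
 */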
/* in sahara_register_algs(): */
	if (dev->version > SAHARA_VERSION_3)
		/* ... also register the SHA algorithms for v4+ hardware ... */

/* in sahara_unregister_algs(): */
	if (dev->version > SAHARA_VERSION_3)
		/* ... */
/* device-tree match table (struct of_device_id entries): */
	{ .compatible = "fsl,imx53-sahara" },
	{ .compatible = "fsl,imx27-sahara" },
	/* ... sentinel ... */
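/*
 * For orientation, a device-tree node matching the i.MX27 entry might look
 * like this (a sketch: the unit address, register size, interrupt number
 * and clock specifiers are illustrative, not taken from this excerpt):
 *
 *	sah: crypto@10025000 {
 *		compatible = "fsl,imx27-sahara";
 *		reg = <0x10025000 0x1000>;
 *		interrupts = <75>;
 *		clocks = <&clks 32>, <&clks 33>;
 *		clock-names = "ipg", "ahb";
 *	};
 *
 * The "ipg"/"ahb" clock-names match the devm_clk_get() calls in
 * sahara_probe() below.
 */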
/* in sahara_probe(): */
	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	dev->device = &pdev->dev;
	/* ... */

	/* Get the base address */
	dev->regs_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(dev->regs_base))
		return PTR_ERR(dev->regs_base);

	/* Get the IRQ */
	/* ... platform_get_irq() ... */
	err = devm_request_irq(&pdev->dev, irq, sahara_irq_handler,
			       0, dev_name(&pdev->dev), dev);
	if (err) {
		dev_err(&pdev->dev, "failed to request irq\n");
		return err;
	}

	/* clocks */
	dev->clk_ipg = devm_clk_get(&pdev->dev, "ipg");
	if (IS_ERR(dev->clk_ipg)) {
		dev_err(&pdev->dev, "Could not get ipg clock\n");
		return PTR_ERR(dev->clk_ipg);
	}

	dev->clk_ahb = devm_clk_get(&pdev->dev, "ahb");
	if (IS_ERR(dev->clk_ahb)) {
		dev_err(&pdev->dev, "Could not get ahb clock\n");
		return PTR_ERR(dev->clk_ahb);
	}

	/* Allocate HW descriptors */
	dev->hw_desc[0] = dmam_alloc_coherent(&pdev->dev,
			SAHARA_MAX_HW_DESC * sizeof(struct sahara_hw_desc),
			&dev->hw_phys_desc[0], GFP_KERNEL);
	if (!dev->hw_desc[0]) {
		dev_err(&pdev->dev, "Could not allocate hw descriptors\n");
		return -ENOMEM;
	}
	dev->hw_desc[1] = dev->hw_desc[0] + 1;
	dev->hw_phys_desc[1] = dev->hw_phys_desc[0] +
			       sizeof(struct sahara_hw_desc);

	/* Allocate space for iv and key */
	dev->key_base = dmam_alloc_coherent(&pdev->dev, 2 * AES_KEYSIZE_128,
					    &dev->key_phys_base, GFP_KERNEL);
	if (!dev->key_base) {
		dev_err(&pdev->dev, "Could not allocate memory for key\n");
		return -ENOMEM;
	}
	dev->iv_base = dev->key_base + AES_KEYSIZE_128;
	dev->iv_phys_base = dev->key_phys_base + AES_KEYSIZE_128;

	/* Allocate space for the SHA context */
	dev->context_base = dmam_alloc_coherent(&pdev->dev,
			SHA256_DIGEST_SIZE + 4,
			&dev->context_phys_base, GFP_KERNEL);
	if (!dev->context_base) {
		dev_err(&pdev->dev, "Could not allocate memory for MDHA context\n");
		return -ENOMEM;
	}

	/* Allocate space for HW links */
	dev->hw_link[0] = dmam_alloc_coherent(&pdev->dev,
			SAHARA_MAX_HW_LINK * sizeof(struct sahara_hw_link),
			&dev->hw_phys_link[0], GFP_KERNEL);
	if (!dev->hw_link[0]) {
		dev_err(&pdev->dev, "Could not allocate hw links\n");
		return -ENOMEM;
	}
	for (i = 1; i < SAHARA_MAX_HW_LINK; i++) {
		dev->hw_phys_link[i] = dev->hw_phys_link[i - 1] +
				       sizeof(struct sahara_hw_link);
		dev->hw_link[i] = dev->hw_link[i - 1] + 1;
	}

	crypto_init_queue(&dev->queue, SAHARA_QUEUE_LENGTH);

	mutex_init(&dev->queue_mutex);
	/* ... */

	dev->kthread = kthread_run(sahara_queue_manage, dev, "sahara_crypto");
	if (IS_ERR(dev->kthread))
		return PTR_ERR(dev->kthread);

	init_completion(&dev->dma_completion);

	err = clk_prepare_enable(dev->clk_ipg);
	/* ... */
	err = clk_prepare_enable(dev->clk_ahb);
	/* ... */

	version = sahara_read(dev, SAHARA_REG_VERSION);
	if (of_device_is_compatible(pdev->dev.of_node, "fsl,imx27-sahara")) {
		if (version != SAHARA_VERSION_3)
			err = -ENODEV;
	} else if (of_device_is_compatible(pdev->dev.of_node,
					   "fsl,imx53-sahara")) {
		/* ... unless the version register reports v4: */
			err = -ENODEV;
	}
	if (err == -ENODEV) {
		dev_err(&pdev->dev, "SAHARA version %d not supported\n",
			version);
		/* ... goto the error unwind below ... */
	}

	dev->version = version;
	/* ... reset/configure the engine, register the algorithms ... */

	dev_info(&pdev->dev, "SAHARA version %d initialized\n", version);

	return 0;

	/* error unwind (labels elided in this excerpt): */
	kthread_stop(dev->kthread);
	/* ... */
	clk_disable_unprepare(dev->clk_ahb);
	/* ... */
	clk_disable_unprepare(dev->clk_ipg);
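/*
 * Probe notes: MMIO, IRQ and clocks use devm_* helpers, and every
 * descriptor/link/key/context buffer comes from dmam_alloc_coherent(), so
 * the explicit unwind above only has to stop the kthread and disable the
 * clocks. The compatible string decides which SAHARA_REG_VERSION value is
 * accepted (version 3 on i.MX27, version 4 on i.MX53).
 */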
/* in sahara_remove(): */
	kthread_stop(dev->kthread);
	/* ... */
	clk_disable_unprepare(dev->clk_ipg);
	clk_disable_unprepare(dev->clk_ahb);
MODULE_AUTHOR("Javier Martin <javier.martin@vista-silicon.com>");