Lines matching references to dd (drivers/crypto/atmel-aes.c)

108 	struct atmel_aes_dev	*dd;  member
348 static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset) in atmel_aes_read() argument
350 u32 value = readl_relaxed(dd->io_base + offset); in atmel_aes_read()
353 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_read()
356 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value, in atmel_aes_read()
364 static inline void atmel_aes_write(struct atmel_aes_dev *dd, in atmel_aes_write() argument
368 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_write()
371 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value, in atmel_aes_write()
376 writel_relaxed(value, dd->io_base + offset); in atmel_aes_write()
379 static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_n() argument
383 *value = atmel_aes_read(dd, offset); in atmel_aes_read_n()
386 static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_n() argument
390 atmel_aes_write(dd, offset, *value); in atmel_aes_write_n()
393 static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_block() argument
396 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
399 static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_block() argument
402 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
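
The matches at lines 348-402 outline the register-access layer: relaxed MMIO accessors with an optional verbose dump, plus multi-word wrappers that step through one 16-byte AES block at a time. Below is a minimal sketch reconstructing that pattern from the matched fragments; the register-name lookup used by the real dump path is not among the matches, so the sketch prints the raw offset instead, and the private fields of struct atmel_aes_dev (io_base, dev, flags) are assumed as shown above.

	/* Sketch of the register-access helpers (reconstruction, not verbatim source). */
	#include <linux/io.h>
	#include <linux/device.h>

	#define SIZE_IN_WORDS(x)	((x) / sizeof(u32))	/* assumption: 32-bit register words */

	static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
	{
		u32 value = readl_relaxed(dd->io_base + offset);

		if (dd->flags & AES_FLAGS_DUMP_REG)
			dev_vdbg(dd->dev, "read 0x%08x from 0x%02x\n", value, offset);

		return value;
	}

	static inline void atmel_aes_write(struct atmel_aes_dev *dd, u32 offset, u32 value)
	{
		if (dd->flags & AES_FLAGS_DUMP_REG)
			dev_vdbg(dd->dev, "write 0x%08x into 0x%02x\n", value, offset);

		writel_relaxed(value, dd->io_base + offset);
	}

	/* The _n variants walk consecutive registers one 32-bit word at a time... */
	static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
				     u32 *value, int count)
	{
		for (; count--; value++, offset += 4)
			*value = atmel_aes_read(dd, offset);
	}

	/* ...and the _block variants cover exactly one AES block (16 bytes = 4 words). */
	static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
						u32 *value)
	{
		atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
	}
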
405 static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd, in atmel_aes_wait_for_data_ready() argument
408 u32 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_wait_for_data_ready()
411 return resume(dd); in atmel_aes_wait_for_data_ready()
413 dd->resume = resume; in atmel_aes_wait_for_data_ready()
414 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_wait_for_data_ready()
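
atmel_aes_wait_for_data_ready() (line 405 onward) is the core of the driver's continuation style: if DATARDY is already set in the interrupt status register the resume callback runs immediately, otherwise the callback is parked in dd->resume and the DATARDY interrupt is unmasked so the interrupt path can run it later. A sketch of that control flow, assuming the usual -EINPROGRESS convention for "continue asynchronously":

	/* Sketch: run "resume" immediately if DATARDY is set, otherwise defer it. */
	typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *dd);

	static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
							atmel_aes_fn_t resume)
	{
		u32 isr = atmel_aes_read(dd, AES_ISR);

		if (unlikely(isr & AES_INT_DATARDY))
			return resume(dd);			/* hardware already has data */

		dd->resume = resume;				/* park the continuation */
		atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);	/* unmask DATARDY */
		return -EINPROGRESS;				/* assumed asynchronous marker */
	}
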
430 if (!ctx->dd) { in atmel_aes_find_dev()
435 ctx->dd = aes_dd; in atmel_aes_find_dev()
437 aes_dd = ctx->dd; in atmel_aes_find_dev()
445 static int atmel_aes_hw_init(struct atmel_aes_dev *dd) in atmel_aes_hw_init() argument
449 err = clk_enable(dd->iclk); in atmel_aes_hw_init()
453 atmel_aes_write(dd, AES_CR, AES_CR_SWRST); in atmel_aes_hw_init()
454 atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET); in atmel_aes_hw_init()
459 static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd) in atmel_aes_get_version() argument
461 return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff; in atmel_aes_get_version()
464 static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd) in atmel_aes_hw_version_init() argument
468 err = atmel_aes_hw_init(dd); in atmel_aes_hw_version_init()
472 dd->hw_version = atmel_aes_get_version(dd); in atmel_aes_hw_version_init()
474 dev_info(dd->dev, "version: 0x%x\n", dd->hw_version); in atmel_aes_hw_version_init()
476 clk_disable(dd->iclk); in atmel_aes_hw_version_init()
480 static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd, in atmel_aes_set_mode() argument
484 dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode; in atmel_aes_set_mode()
487 static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd) in atmel_aes_is_encrypt() argument
489 return (dd->flags & AES_FLAGS_ENCRYPT); in atmel_aes_is_encrypt()
493 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
496 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_complete() argument
499 if (dd->ctx->is_aead) in atmel_aes_complete()
500 atmel_aes_authenc_complete(dd, err); in atmel_aes_complete()
503 clk_disable(dd->iclk); in atmel_aes_complete()
504 dd->flags &= ~AES_FLAGS_BUSY; in atmel_aes_complete()
506 if (!dd->ctx->is_aead) { in atmel_aes_complete()
508 ablkcipher_request_cast(dd->areq); in atmel_aes_complete()
527 if (dd->is_async) in atmel_aes_complete()
528 dd->areq->complete(dd->areq, err); in atmel_aes_complete()
530 tasklet_schedule(&dd->queue_task); in atmel_aes_complete()
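
atmel_aes_complete() (lines 496-530) tears a request down: release the peripheral clock taken in atmel_aes_hw_init(), clear the BUSY flag, complete the crypto request if the transfer went asynchronous, and kick the queue tasklet so the next queued request can start. A trimmed sketch of that shape, with the AEAD and IV-handling branches visible at lines 499-508 omitted:

	/* Sketch of the completion path: finish this request, then restart the queue. */
	static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
	{
		clk_disable(dd->iclk);				/* balance atmel_aes_hw_init() */
		dd->flags &= ~AES_FLAGS_BUSY;

		if (dd->is_async)
			dd->areq->complete(dd->areq, err);	/* notify the requester */

		tasklet_schedule(&dd->queue_task);		/* dispatch the next queued request */
		return err;
	}
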
535 static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl_key() argument
548 valmr |= dd->flags & AES_FLAGS_MODE_MASK; in atmel_aes_write_ctrl_key()
552 if (dd->caps.has_dualbuff) in atmel_aes_write_ctrl_key()
558 atmel_aes_write(dd, AES_MR, valmr); in atmel_aes_write_ctrl_key()
560 atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen)); in atmel_aes_write_ctrl_key()
563 atmel_aes_write_block(dd, AES_IVR(0), iv); in atmel_aes_write_ctrl_key()
566 static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl() argument
570 atmel_aes_write_ctrl_key(dd, use_dma, iv, in atmel_aes_write_ctrl()
571 dd->ctx->key, dd->ctx->keylen); in atmel_aes_write_ctrl()
576 static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd) in atmel_aes_cpu_transfer() argument
582 atmel_aes_read_block(dd, AES_ODATAR(0), dd->data); in atmel_aes_cpu_transfer()
583 dd->data += 4; in atmel_aes_cpu_transfer()
584 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
586 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
589 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_transfer()
591 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_cpu_transfer()
593 dd->resume = atmel_aes_cpu_transfer; in atmel_aes_cpu_transfer()
594 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_cpu_transfer()
599 if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_cpu_transfer()
600 dd->buf, dd->total)) in atmel_aes_cpu_transfer()
604 return atmel_aes_complete(dd, err); in atmel_aes_cpu_transfer()
606 return dd->cpu_transfer_complete(dd); in atmel_aes_cpu_transfer()
609 static int atmel_aes_cpu_start(struct atmel_aes_dev *dd, in atmel_aes_cpu_start() argument
620 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_cpu_start()
622 dd->total = len; in atmel_aes_cpu_start()
623 dd->real_dst = dst; in atmel_aes_cpu_start()
624 dd->cpu_transfer_complete = resume; in atmel_aes_cpu_start()
625 dd->datalen = len + padlen; in atmel_aes_cpu_start()
626 dd->data = (u32 *)dd->buf; in atmel_aes_cpu_start()
627 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_start()
628 return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer); in atmel_aes_cpu_start()
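
The PIO path (lines 576-628) bounces the whole request through dd->buf: atmel_aes_cpu_start() copies the source scatterlist into the buffer and writes the first block, then atmel_aes_cpu_transfer() ping-pongs read-block/write-block until datalen is exhausted and finally copies the buffer back to the destination scatterlist. The sketch below reconstructs that loop; the error code for a failed copy-back and the exact padding rule are assumptions where the matches do not show them.

	/* Sketch of the PIO (CPU-driven) transfer loop. */
	static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
	{
		int err = 0;
		u32 isr;

		for (;;) {
			atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
			dd->data += 4;				/* 4 words = one AES block */
			dd->datalen -= AES_BLOCK_SIZE;

			if (dd->datalen < AES_BLOCK_SIZE)
				break;				/* nothing left to feed */

			atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

			isr = atmel_aes_read(dd, AES_ISR);
			if (!(isr & AES_INT_DATARDY)) {
				dd->resume = atmel_aes_cpu_transfer;
				atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
				return -EINPROGRESS;		/* continue from the interrupt */
			}
		}

		if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
					 dd->buf, dd->total))
			err = -EINVAL;				/* assumed copy-back failure code */

		if (err)
			return atmel_aes_complete(dd, err);

		return dd->cpu_transfer_complete(dd);
	}

	static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
				       struct scatterlist *src, struct scatterlist *dst,
				       size_t len, atmel_aes_fn_t resume)
	{
		size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);	/* pad to a full block */

		if (unlikely(len + padlen > dd->buflen))
			return atmel_aes_complete(dd, -EINVAL);

		sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);

		dd->total = len;
		dd->real_dst = dst;
		dd->cpu_transfer_complete = resume;
		dd->datalen = len + padlen;
		dd->data = (u32 *)dd->buf;
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
	}
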
636 static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd, in atmel_aes_check_aligned() argument
643 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
651 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
660 if (!IS_ALIGNED(sg->length, dd->ctx->block_size)) in atmel_aes_check_aligned()
686 static int atmel_aes_map(struct atmel_aes_dev *dd, in atmel_aes_map() argument
694 dd->total = len; in atmel_aes_map()
695 dd->src.sg = src; in atmel_aes_map()
696 dd->dst.sg = dst; in atmel_aes_map()
697 dd->real_dst = dst; in atmel_aes_map()
699 src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src); in atmel_aes_map()
703 dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst); in atmel_aes_map()
705 padlen = atmel_aes_padlen(len, dd->ctx->block_size); in atmel_aes_map()
707 if (dd->buflen < len + padlen) in atmel_aes_map()
711 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_map()
712 dd->src.sg = &dd->aligned_sg; in atmel_aes_map()
713 dd->src.nents = 1; in atmel_aes_map()
714 dd->src.remainder = 0; in atmel_aes_map()
718 dd->dst.sg = &dd->aligned_sg; in atmel_aes_map()
719 dd->dst.nents = 1; in atmel_aes_map()
720 dd->dst.remainder = 0; in atmel_aes_map()
723 sg_init_table(&dd->aligned_sg, 1); in atmel_aes_map()
724 sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen); in atmel_aes_map()
727 if (dd->src.sg == dd->dst.sg) { in atmel_aes_map()
728 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
730 dd->dst.sg_len = dd->src.sg_len; in atmel_aes_map()
731 if (!dd->src.sg_len) in atmel_aes_map()
734 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
736 if (!dd->src.sg_len) in atmel_aes_map()
739 dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_map()
741 if (!dd->dst.sg_len) { in atmel_aes_map()
742 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
751 static void atmel_aes_unmap(struct atmel_aes_dev *dd) in atmel_aes_unmap() argument
753 if (dd->src.sg == dd->dst.sg) { in atmel_aes_unmap()
754 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
757 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
758 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
760 dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_unmap()
763 if (dd->dst.sg != &dd->aligned_sg) in atmel_aes_unmap()
764 atmel_aes_restore_sg(&dd->dst); in atmel_aes_unmap()
766 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
769 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
770 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
773 if (dd->dst.sg == &dd->aligned_sg) in atmel_aes_unmap()
774 sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_unmap()
775 dd->buf, dd->total); in atmel_aes_unmap()
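
atmel_aes_map()/atmel_aes_unmap() (lines 686-775) decide whether DMA can run directly on the caller's scatterlists: if either side is not aligned to the block size, both sides are redirected to the single aligned_sg bounce buffer, and after the transfer atmel_aes_unmap() copies the buffer back into the real destination. A condensed sketch of the mapping decision; the DMA directions and error codes are assumptions inferred from the in-place versus out-of-place cases shown above.

	/* Sketch: map src/dst for DMA, falling back to the aligned bounce buffer. */
	static int atmel_aes_map(struct atmel_aes_dev *dd,
				 struct scatterlist *src, struct scatterlist *dst,
				 size_t len)
	{
		bool src_aligned, dst_aligned;
		size_t padlen;

		dd->total = len;
		dd->src.sg = src;
		dd->dst.sg = dst;
		dd->real_dst = dst;

		src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
		dst_aligned = (src == dst) ? src_aligned :
			      atmel_aes_check_aligned(dd, dst, len, &dd->dst);

		if (!src_aligned || !dst_aligned) {
			padlen = atmel_aes_padlen(len, dd->ctx->block_size);
			if (dd->buflen < len + padlen)
				return -ENOMEM;			/* bounce buffer too small */

			if (!src_aligned) {			/* stage the input in dd->buf */
				sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
				dd->src.sg = &dd->aligned_sg;
				dd->src.nents = 1;
				dd->src.remainder = 0;
			}
			if (!dst_aligned) {			/* output lands in dd->buf too */
				dd->dst.sg = &dd->aligned_sg;
				dd->dst.nents = 1;
				dd->dst.remainder = 0;
			}
			sg_init_table(&dd->aligned_sg, 1);
			sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
		}

		if (dd->src.sg == dd->dst.sg) {			/* in-place: one bidirectional map */
			dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
						    DMA_BIDIRECTIONAL);
			dd->dst.sg_len = dd->src.sg_len;
			return dd->src.sg_len ? 0 : -EFAULT;
		}

		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, DMA_TO_DEVICE);
			return -EFAULT;
		}

		return 0;
	}
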
778 static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd, in atmel_aes_dma_transfer_start() argument
798 dma = &dd->src; in atmel_aes_dma_transfer_start()
800 config.dst_addr = dd->phys_base + AES_IDATAR(0); in atmel_aes_dma_transfer_start()
804 dma = &dd->dst; in atmel_aes_dma_transfer_start()
806 config.src_addr = dd->phys_base + AES_ODATAR(0); in atmel_aes_dma_transfer_start()
823 desc->callback_param = dd; in atmel_aes_dma_transfer_start()
830 static void atmel_aes_dma_transfer_stop(struct atmel_aes_dev *dd, in atmel_aes_dma_transfer_stop() argument
837 dma = &dd->src; in atmel_aes_dma_transfer_stop()
841 dma = &dd->dst; in atmel_aes_dma_transfer_stop()
851 static int atmel_aes_dma_start(struct atmel_aes_dev *dd, in atmel_aes_dma_start() argument
861 switch (dd->ctx->block_size) { in atmel_aes_dma_start()
880 maxburst = dd->caps.max_burst_size; in atmel_aes_dma_start()
888 err = atmel_aes_map(dd, src, dst, len); in atmel_aes_dma_start()
892 dd->resume = resume; in atmel_aes_dma_start()
895 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM, in atmel_aes_dma_start()
901 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV, in atmel_aes_dma_start()
909 atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM); in atmel_aes_dma_start()
911 atmel_aes_unmap(dd); in atmel_aes_dma_start()
913 return atmel_aes_complete(dd, err); in atmel_aes_dma_start()
916 static void atmel_aes_dma_stop(struct atmel_aes_dev *dd) in atmel_aes_dma_stop() argument
918 atmel_aes_dma_transfer_stop(dd, DMA_MEM_TO_DEV); in atmel_aes_dma_stop()
919 atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM); in atmel_aes_dma_stop()
920 atmel_aes_unmap(dd); in atmel_aes_dma_stop()
925 struct atmel_aes_dev *dd = data; in atmel_aes_dma_callback() local
927 atmel_aes_dma_stop(dd); in atmel_aes_dma_callback()
928 dd->is_async = true; in atmel_aes_dma_callback()
929 (void)dd->resume(dd); in atmel_aes_dma_callback()
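
The DMA path (lines 778-929) programs one dmaengine slave channel per direction: dd->src feeds AES_IDATAR(0) (memory to device) and dd->dst drains AES_ODATAR(0) (device to memory), with the completion callback stopping both channels and resuming the parked continuation. A sketch of one direction's setup using the standard dmaengine calls; which descriptor actually carries the callback is an assumption, and struct atmel_aes_dma stands for the per-direction channel/scatterlist bookkeeping seen in the matches.

	/* Sketch: start one slave-DMA transfer between memory and the AES data FIFO. */
	#include <linux/dmaengine.h>

	static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd,
						enum dma_slave_buswidth addr_width,
						enum dma_transfer_direction dir,
						u32 maxburst)
	{
		struct dma_slave_config config = { .direction = dir };
		struct dma_async_tx_descriptor *desc;
		struct atmel_aes_dma *dma;
		int err;

		if (dir == DMA_MEM_TO_DEV) {			/* memory -> AES_IDATAR */
			dma = &dd->src;
			config.dst_addr = dd->phys_base + AES_IDATAR(0);
			config.dst_addr_width = addr_width;
			config.dst_maxburst = maxburst;
		} else {					/* AES_ODATAR -> memory */
			dma = &dd->dst;
			config.src_addr = dd->phys_base + AES_ODATAR(0);
			config.src_addr_width = addr_width;
			config.src_maxburst = maxburst;
		}

		err = dmaengine_slave_config(dma->chan, &config);
		if (err)
			return err;

		desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
					       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc)
			return -ENOMEM;

		/* Assumption: only the device->memory side notifies us on completion. */
		if (dir == DMA_DEV_TO_MEM) {
			desc->callback = atmel_aes_dma_callback;
			desc->callback_param = dd;
		}

		dmaengine_submit(desc);
		dma_async_issue_pending(dma->chan);
		return 0;
	}
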
932 static int atmel_aes_handle_queue(struct atmel_aes_dev *dd, in atmel_aes_handle_queue() argument
941 spin_lock_irqsave(&dd->lock, flags); in atmel_aes_handle_queue()
943 ret = crypto_enqueue_request(&dd->queue, new_areq); in atmel_aes_handle_queue()
944 if (dd->flags & AES_FLAGS_BUSY) { in atmel_aes_handle_queue()
945 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
948 backlog = crypto_get_backlog(&dd->queue); in atmel_aes_handle_queue()
949 areq = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
951 dd->flags |= AES_FLAGS_BUSY; in atmel_aes_handle_queue()
952 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
962 dd->areq = areq; in atmel_aes_handle_queue()
963 dd->ctx = ctx; in atmel_aes_handle_queue()
965 dd->is_async = start_async; in atmel_aes_handle_queue()
968 err = ctx->start(dd); in atmel_aes_handle_queue()
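
atmel_aes_handle_queue() (lines 932-968) is the usual crypto_queue dispatcher: enqueue the new request under the lock, bail out if the engine is already BUSY, otherwise dequeue the next request (honouring the backlog), mark the device busy, and call the per-context start() entry point. Sketched below with the -EINPROGRESS/-EBUSY return convention the crypto API uses; the base-context type name is assumed.

	/* Sketch of the request-queue dispatcher. */
	static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
					  struct crypto_async_request *new_areq)
	{
		struct crypto_async_request *areq, *backlog;
		struct atmel_aes_base_ctx *ctx;
		unsigned long flags;
		bool start_async;
		int err, ret = 0;

		spin_lock_irqsave(&dd->lock, flags);
		if (new_areq)
			ret = crypto_enqueue_request(&dd->queue, new_areq);
		if (dd->flags & AES_FLAGS_BUSY) {		/* engine busy: request stays queued */
			spin_unlock_irqrestore(&dd->lock, flags);
			return ret;
		}
		backlog = crypto_get_backlog(&dd->queue);
		areq = crypto_dequeue_request(&dd->queue);
		if (areq)
			dd->flags |= AES_FLAGS_BUSY;
		spin_unlock_irqrestore(&dd->lock, flags);

		if (!areq)
			return ret;

		if (backlog)
			backlog->complete(backlog, -EINPROGRESS);

		ctx = crypto_tfm_ctx(areq->tfm);

		dd->areq = areq;
		dd->ctx = ctx;
		start_async = (areq != new_areq);		/* async if we started someone else's request */
		dd->is_async = start_async;

		err = ctx->start(dd);				/* mode-specific entry point */
		return start_async ? ret : err;
	}
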
975 static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd) in atmel_aes_transfer_complete() argument
977 return atmel_aes_complete(dd, 0); in atmel_aes_transfer_complete()
980 static int atmel_aes_start(struct atmel_aes_dev *dd) in atmel_aes_start() argument
982 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_start()
985 dd->ctx->block_size != AES_BLOCK_SIZE); in atmel_aes_start()
988 atmel_aes_set_mode(dd, rctx); in atmel_aes_start()
990 err = atmel_aes_hw_init(dd); in atmel_aes_start()
992 return atmel_aes_complete(dd, err); in atmel_aes_start()
994 atmel_aes_write_ctrl(dd, use_dma, req->info); in atmel_aes_start()
996 return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes, in atmel_aes_start()
999 return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes, in atmel_aes_start()
1009 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd) in atmel_aes_ctr_transfer() argument
1011 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_transfer()
1012 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_ctr_transfer()
1019 ctx->offset += dd->total; in atmel_aes_ctr_transfer()
1021 return atmel_aes_transfer_complete(dd); in atmel_aes_ctr_transfer()
1027 if (dd->caps.has_ctr32) { in atmel_aes_ctr_transfer()
1056 atmel_aes_write_ctrl(dd, use_dma, ctx->iv); in atmel_aes_ctr_transfer()
1067 return atmel_aes_dma_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1070 return atmel_aes_cpu_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1074 static int atmel_aes_ctr_start(struct atmel_aes_dev *dd) in atmel_aes_ctr_start() argument
1076 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_start()
1077 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_ctr_start()
1081 atmel_aes_set_mode(dd, rctx); in atmel_aes_ctr_start()
1083 err = atmel_aes_hw_init(dd); in atmel_aes_ctr_start()
1085 return atmel_aes_complete(dd, err); in atmel_aes_ctr_start()
1089 dd->total = 0; in atmel_aes_ctr_start()
1090 return atmel_aes_ctr_transfer(dd); in atmel_aes_ctr_start()
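
The CTR path (lines 1009-1090) processes the request in chunks: ctx->offset advances by dd->total after each chunk and the request completes once the whole length has been covered; on hardware with only a 32-bit counter (caps.has_ctr32) each chunk is additionally clamped so the counter cannot wrap mid-transfer. The skeleton below sketches that loop; the clamp arithmetic, the ctx->src/ctx->dst scatterlist temporaries and the ATMEL_AES_DMA_THRESHOLD constant are assumptions not shown in the matches.

	/* Sketch: walk the request in chunks, completing once offset covers nbytes. */
	static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
	{
		struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
		struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
		struct scatterlist *src, *dst;
		size_t datalen;
		bool use_dma;

		ctx->offset += dd->total;			/* bytes handled by the previous chunk */
		if (ctx->offset >= req->nbytes)
			return atmel_aes_transfer_complete(dd);

		datalen = req->nbytes - ctx->offset;
		/*
		 * Assumption: when dd->caps.has_ctr32 is set, the real driver clamps
		 * datalen here so the 32-bit hardware counter cannot wrap within a
		 * single chunk.
		 */

		/* Fast-forward both scatterlists to the current offset. */
		src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
		dst = (req->src == req->dst) ? src
					     : scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset);

		use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);	/* assumed threshold constant */
		atmel_aes_write_ctrl(dd, use_dma, ctx->iv);

		if (use_dma)
			return atmel_aes_dma_start(dd, src, dst, datalen,
						   atmel_aes_ctr_transfer);
		return atmel_aes_cpu_start(dd, src, dst, datalen,
					   atmel_aes_ctr_transfer);
	}
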
1098 struct atmel_aes_dev *dd; in atmel_aes_crypt() local
1123 dd = atmel_aes_find_dev(ctx); in atmel_aes_crypt()
1124 if (!dd) in atmel_aes_crypt()
1137 return atmel_aes_handle_queue(dd, &req->base); in atmel_aes_crypt()
1454 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
1458 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
1459 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
1461 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
1462 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
1463 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
1464 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
1465 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
1466 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
1467 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);
1475 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd, in atmel_aes_gcm_ghash() argument
1480 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash()
1482 dd->data = (u32 *)data; in atmel_aes_gcm_ghash()
1483 dd->datalen = datalen; in atmel_aes_gcm_ghash()
1488 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_ghash()
1489 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init); in atmel_aes_gcm_ghash()
1492 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_init() argument
1494 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_init()
1497 atmel_aes_write(dd, AES_AADLENR, dd->total); in atmel_aes_gcm_ghash_init()
1498 atmel_aes_write(dd, AES_CLENR, 0); in atmel_aes_gcm_ghash_init()
1502 atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in); in atmel_aes_gcm_ghash_init()
1504 return atmel_aes_gcm_ghash_finalize(dd); in atmel_aes_gcm_ghash_init()
1507 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_finalize() argument
1509 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_finalize()
1513 while (dd->datalen > 0) { in atmel_aes_gcm_ghash_finalize()
1514 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_ghash_finalize()
1515 dd->data += 4; in atmel_aes_gcm_ghash_finalize()
1516 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_ghash_finalize()
1518 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_ghash_finalize()
1520 dd->resume = atmel_aes_gcm_ghash_finalize; in atmel_aes_gcm_ghash_finalize()
1521 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_ghash_finalize()
1527 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out); in atmel_aes_gcm_ghash_finalize()
1529 return ctx->ghash_resume(dd); in atmel_aes_gcm_ghash_finalize()
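
For GCM, atmel_aes_gcm_ghash() and its _init/_finalize continuations (lines 1475-1529) drive the hardware GHASH unit directly: the AAD length goes into AES_AADLENR with AES_CLENR set to zero, an optional seed is written to AES_GHASHR, input blocks are fed through AES_IDATAR(0) with the usual DATARDY handshake, and the digest is read back from AES_GHASHR. A compact sketch of the finalize loop, reconstructed from the matches:

	/* Sketch: feed the remaining blocks into the GHASH unit and read the digest. */
	static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
	{
		struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
		u32 isr;

		while (dd->datalen > 0) {
			atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
			dd->data += 4;
			dd->datalen -= AES_BLOCK_SIZE;

			isr = atmel_aes_read(dd, AES_ISR);
			if (!(isr & AES_INT_DATARDY)) {
				dd->resume = atmel_aes_gcm_ghash_finalize;
				atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
				return -EINPROGRESS;		/* resume from the interrupt */
			}
		}

		atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);
		return ctx->ghash_resume(dd);			/* hand the digest to the caller */
	}
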
1533 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd) in atmel_aes_gcm_start() argument
1535 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_start()
1536 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_start()
1542 u8 *data = dd->buf; in atmel_aes_gcm_start()
1545 atmel_aes_set_mode(dd, rctx); in atmel_aes_gcm_start()
1547 err = atmel_aes_hw_init(dd); in atmel_aes_gcm_start()
1549 return atmel_aes_complete(dd, err); in atmel_aes_gcm_start()
1554 return atmel_aes_gcm_process(dd); in atmel_aes_gcm_start()
1559 if (datalen > dd->buflen) in atmel_aes_gcm_start()
1560 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_start()
1566 return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen, in atmel_aes_gcm_start()
1570 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd) in atmel_aes_gcm_process() argument
1572 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_process()
1573 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_process()
1575 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_process()
1587 dd->flags |= AES_FLAGS_GTAGEN; in atmel_aes_gcm_process()
1589 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_process()
1590 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length); in atmel_aes_gcm_process()
1593 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd) in atmel_aes_gcm_length() argument
1595 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_length()
1596 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_length()
1603 atmel_aes_write_block(dd, AES_IVR(0), j0); in atmel_aes_gcm_length()
1607 atmel_aes_write(dd, AES_AADLENR, req->assoclen); in atmel_aes_gcm_length()
1608 atmel_aes_write(dd, AES_CLENR, ctx->textlen); in atmel_aes_gcm_length()
1612 dd->datalen = 0; in atmel_aes_gcm_length()
1613 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1618 if (unlikely(req->assoclen + padlen > dd->buflen)) in atmel_aes_gcm_length()
1619 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_length()
1620 sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen); in atmel_aes_gcm_length()
1623 dd->data = (u32 *)dd->buf; in atmel_aes_gcm_length()
1624 dd->datalen = req->assoclen + padlen; in atmel_aes_gcm_length()
1625 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1628 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd) in atmel_aes_gcm_data() argument
1630 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_data()
1631 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_data()
1637 while (dd->datalen > 0) { in atmel_aes_gcm_data()
1638 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_data()
1639 dd->data += 4; in atmel_aes_gcm_data()
1640 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_data()
1642 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_data()
1644 dd->resume = atmel_aes_gcm_data; in atmel_aes_gcm_data()
1645 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_data()
1652 return atmel_aes_gcm_tag_init(dd); in atmel_aes_gcm_data()
1661 mr = atmel_aes_read(dd, AES_MR); in atmel_aes_gcm_data()
1664 if (dd->caps.has_dualbuff) in atmel_aes_gcm_data()
1666 atmel_aes_write(dd, AES_MR, mr); in atmel_aes_gcm_data()
1668 return atmel_aes_dma_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1672 return atmel_aes_cpu_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
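
atmel_aes_gcm_length() and atmel_aes_gcm_data() (lines 1593-1672) program J0 into AES_IVR, set AES_AADLENR/AES_CLENR to the associated-data and plaintext lengths, then push the block-padded AAD through IDATAR before switching to DMA or PIO for the payload. The AAD staging step is sketched below; the zero padding and the error code are assumptions, and the counter-increment detail required by the GCM spec is only noted in a comment.

	/* Sketch: stage the associated data, padded to a whole AES block, in dd->buf. */
	static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
	{
		struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
		struct aead_request *req = aead_request_cast(dd->areq);
		size_t padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);

		/* Simplified: per the GCM spec the payload counter is incr32(J0). */
		atmel_aes_write_block(dd, AES_IVR(0), ctx->j0);

		atmel_aes_write(dd, AES_AADLENR, req->assoclen);
		atmel_aes_write(dd, AES_CLENR, ctx->textlen);

		if (!req->assoclen) {				/* no AAD: go straight to the payload */
			dd->datalen = 0;
			return atmel_aes_gcm_data(dd);
		}

		if (unlikely(req->assoclen + padlen > dd->buflen))
			return atmel_aes_complete(dd, -EINVAL);
		sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);
		memset((u8 *)dd->buf + req->assoclen, 0, padlen);	/* zero-pad the last block */

		dd->data = (u32 *)dd->buf;
		dd->datalen = req->assoclen + padlen;
		return atmel_aes_gcm_data(dd);
	}
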
1676 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag_init() argument
1678 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag_init()
1679 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_tag_init()
1680 u64 *data = dd->buf; in atmel_aes_gcm_tag_init()
1682 if (likely(dd->flags & AES_FLAGS_GTAGEN)) { in atmel_aes_gcm_tag_init()
1683 if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) { in atmel_aes_gcm_tag_init()
1684 dd->resume = atmel_aes_gcm_tag_init; in atmel_aes_gcm_tag_init()
1685 atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY); in atmel_aes_gcm_tag_init()
1689 return atmel_aes_gcm_finalize(dd); in atmel_aes_gcm_tag_init()
1693 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash); in atmel_aes_gcm_tag_init()
1698 return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE, in atmel_aes_gcm_tag_init()
1702 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag() argument
1704 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag()
1711 flags = dd->flags; in atmel_aes_gcm_tag()
1712 dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN); in atmel_aes_gcm_tag()
1713 dd->flags |= AES_FLAGS_CTR; in atmel_aes_gcm_tag()
1714 atmel_aes_write_ctrl(dd, false, ctx->j0); in atmel_aes_gcm_tag()
1715 dd->flags = flags; in atmel_aes_gcm_tag()
1717 atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash); in atmel_aes_gcm_tag()
1718 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize); in atmel_aes_gcm_tag()
1721 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_finalize() argument
1723 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_finalize()
1724 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_finalize()
1726 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_finalize()
1731 if (likely(dd->flags & AES_FLAGS_GTAGEN)) in atmel_aes_gcm_finalize()
1732 atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag); in atmel_aes_gcm_finalize()
1734 atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag); in atmel_aes_gcm_finalize()
1746 return atmel_aes_complete(dd, err); in atmel_aes_gcm_finalize()
1754 struct atmel_aes_dev *dd; in atmel_aes_gcm_crypt() local
1760 dd = atmel_aes_find_dev(ctx); in atmel_aes_gcm_crypt()
1761 if (!dd) in atmel_aes_gcm_crypt()
1767 return atmel_aes_handle_queue(dd, &req->base); in atmel_aes_gcm_crypt()
1858 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
1860 static int atmel_aes_xts_start(struct atmel_aes_dev *dd) in atmel_aes_xts_start() argument
1862 struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx); in atmel_aes_xts_start()
1863 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_xts_start()
1868 atmel_aes_set_mode(dd, rctx); in atmel_aes_xts_start()
1870 err = atmel_aes_hw_init(dd); in atmel_aes_xts_start()
1872 return atmel_aes_complete(dd, err); in atmel_aes_xts_start()
1875 flags = dd->flags; in atmel_aes_xts_start()
1876 dd->flags &= ~AES_FLAGS_MODE_MASK; in atmel_aes_xts_start()
1877 dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT); in atmel_aes_xts_start()
1878 atmel_aes_write_ctrl_key(dd, false, NULL, in atmel_aes_xts_start()
1880 dd->flags = flags; in atmel_aes_xts_start()
1882 atmel_aes_write_block(dd, AES_IDATAR(0), req->info); in atmel_aes_xts_start()
1883 return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data); in atmel_aes_xts_start()
1886 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd) in atmel_aes_xts_process_data() argument
1888 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_xts_process_data()
1896 atmel_aes_read_block(dd, AES_ODATAR(0), tweak); in atmel_aes_xts_process_data()
1910 atmel_aes_write_ctrl(dd, use_dma, NULL); in atmel_aes_xts_process_data()
1911 atmel_aes_write_block(dd, AES_TWR(0), tweak); in atmel_aes_xts_process_data()
1912 atmel_aes_write_block(dd, AES_ALPHAR(0), one); in atmel_aes_xts_process_data()
1914 return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes, in atmel_aes_xts_process_data()
1917 return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes, in atmel_aes_xts_process_data()
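
The XTS path (lines 1860-1917) computes the tweak in two passes: atmel_aes_xts_start() temporarily flips the mode flags to ECB/encrypt, encrypts req->info (the sector IV) with the second key, and atmel_aes_xts_process_data() then reads that ciphered tweak back, reorders its bytes, programs AES_TWR and sets AES_ALPHAR to 1 before launching the real XTS transfer. A sketch of the second phase; the byte reversal and the DMA threshold constant are assumptions, since only the TWR/ALPHAR writes appear in the matches.

	/* Sketch: read back the encrypted tweak and launch the XTS data phase. */
	static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
	{
		struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
		u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
		static const u32 one[AES_BLOCK_SIZE / sizeof(u32)] = { cpu_to_le32(1), };
		u8 *tweak_bytes = (u8 *)tweak;
		bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD);	/* assumed threshold */
		int i;

		/* The ECB pass programmed in xts_start left E_K2(IV) in the output registers. */
		atmel_aes_read_block(dd, AES_ODATAR(0), tweak);

		/* Assumption: the tweak bytes must be reversed before feeding AES_TWR. */
		for (i = 0; i < AES_BLOCK_SIZE / 2; i++)
			swap(tweak_bytes[i], tweak_bytes[AES_BLOCK_SIZE - 1 - i]);

		atmel_aes_write_ctrl(dd, use_dma, NULL);
		atmel_aes_write_block(dd, AES_TWR(0), tweak);
		atmel_aes_write_block(dd, AES_ALPHAR(0), one);	/* alpha = 1: start at block 0 */

		if (use_dma)
			return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
						   atmel_aes_transfer_complete);
		return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
					   atmel_aes_transfer_complete);
	}
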
1982 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
1983 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
1985 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
1987 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
1988 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
1991 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_authenc_complete() argument
1993 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_complete()
1996 if (err && (dd->flags & AES_FLAGS_OWN_SHA)) in atmel_aes_authenc_complete()
1998 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_complete()
2001 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd) in atmel_aes_authenc_start() argument
2003 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_start()
2009 atmel_aes_set_mode(dd, &rctx->base); in atmel_aes_authenc_start()
2011 err = atmel_aes_hw_init(dd); in atmel_aes_authenc_start()
2013 return atmel_aes_complete(dd, err); in atmel_aes_authenc_start()
2016 atmel_aes_authenc_init, dd); in atmel_aes_authenc_start()
2019 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_init() argument
2022 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_init()
2026 dd->is_async = true; in atmel_aes_authenc_init()
2028 return atmel_aes_complete(dd, err); in atmel_aes_authenc_init()
2031 dd->flags |= AES_FLAGS_OWN_SHA; in atmel_aes_authenc_init()
2037 atmel_aes_authenc_transfer, dd); in atmel_aes_authenc_init()
2040 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_transfer() argument
2043 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_transfer()
2045 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_transfer()
2051 dd->is_async = true; in atmel_aes_authenc_transfer()
2053 return atmel_aes_complete(dd, err); in atmel_aes_authenc_transfer()
2072 atmel_aes_write_ctrl(dd, true, iv); in atmel_aes_authenc_transfer()
2076 atmel_aes_write(dd, AES_EMR, emr); in atmel_aes_authenc_transfer()
2079 return atmel_aes_dma_start(dd, src, dst, rctx->textlen, in atmel_aes_authenc_transfer()
2083 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd) in atmel_aes_authenc_digest() argument
2085 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_digest()
2089 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_digest()
2092 atmel_aes_authenc_final, dd); in atmel_aes_authenc_digest()
2095 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_final() argument
2098 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_final()
2101 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_final()
2106 dd->is_async = true; in atmel_aes_authenc_final()
2121 return atmel_aes_complete(dd, err); in atmel_aes_authenc_final()
2219 struct atmel_aes_dev *dd; in atmel_aes_authenc_crypt() local
2238 dd = atmel_aes_find_dev(ctx); in atmel_aes_authenc_crypt()
2239 if (!dd) in atmel_aes_authenc_crypt()
2242 return atmel_aes_handle_queue(dd, &req->base); in atmel_aes_authenc_crypt()
2361 static int atmel_aes_buff_init(struct atmel_aes_dev *dd) in atmel_aes_buff_init() argument
2363 dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER); in atmel_aes_buff_init()
2364 dd->buflen = ATMEL_AES_BUFFER_SIZE; in atmel_aes_buff_init()
2365 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
2367 if (!dd->buf) { in atmel_aes_buff_init()
2368 dev_err(dd->dev, "unable to alloc pages.\n"); in atmel_aes_buff_init()
2375 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd) in atmel_aes_buff_cleanup() argument
2377 free_page((unsigned long)dd->buf); in atmel_aes_buff_cleanup()
2392 static int atmel_aes_dma_init(struct atmel_aes_dev *dd, in atmel_aes_dma_init() argument
2403 dd->src.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter, in atmel_aes_dma_init()
2404 slave, dd->dev, "tx"); in atmel_aes_dma_init()
2405 if (!dd->src.chan) in atmel_aes_dma_init()
2409 dd->dst.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter, in atmel_aes_dma_init()
2410 slave, dd->dev, "rx"); in atmel_aes_dma_init()
2411 if (!dd->dst.chan) in atmel_aes_dma_init()
2417 dma_release_channel(dd->src.chan); in atmel_aes_dma_init()
2419 dev_warn(dd->dev, "no DMA channel available\n"); in atmel_aes_dma_init()
2423 static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd) in atmel_aes_dma_cleanup() argument
2425 dma_release_channel(dd->dst.chan); in atmel_aes_dma_cleanup()
2426 dma_release_channel(dd->src.chan); in atmel_aes_dma_cleanup()
2431 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_queue_task() local
2433 atmel_aes_handle_queue(dd, NULL); in atmel_aes_queue_task()
2438 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_done_task() local
2440 dd->is_async = true; in atmel_aes_done_task()
2441 (void)dd->resume(dd); in atmel_aes_done_task()
2462 static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd) in atmel_aes_unregister_algs() argument
2467 if (dd->caps.has_authenc) in atmel_aes_unregister_algs()
2472 if (dd->caps.has_xts) in atmel_aes_unregister_algs()
2475 if (dd->caps.has_gcm) in atmel_aes_unregister_algs()
2478 if (dd->caps.has_cfb64) in atmel_aes_unregister_algs()
2485 static int atmel_aes_register_algs(struct atmel_aes_dev *dd) in atmel_aes_register_algs() argument
2495 if (dd->caps.has_cfb64) { in atmel_aes_register_algs()
2501 if (dd->caps.has_gcm) { in atmel_aes_register_algs()
2507 if (dd->caps.has_xts) { in atmel_aes_register_algs()
2514 if (dd->caps.has_authenc) { in atmel_aes_register_algs()
2545 static void atmel_aes_get_cap(struct atmel_aes_dev *dd) in atmel_aes_get_cap() argument
2547 dd->caps.has_dualbuff = 0; in atmel_aes_get_cap()
2548 dd->caps.has_cfb64 = 0; in atmel_aes_get_cap()
2549 dd->caps.has_ctr32 = 0; in atmel_aes_get_cap()
2550 dd->caps.has_gcm = 0; in atmel_aes_get_cap()
2551 dd->caps.has_xts = 0; in atmel_aes_get_cap()
2552 dd->caps.has_authenc = 0; in atmel_aes_get_cap()
2553 dd->caps.max_burst_size = 1; in atmel_aes_get_cap()
2556 switch (dd->hw_version & 0xff0) { in atmel_aes_get_cap()
2558 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2559 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2560 dd->caps.has_ctr32 = 1; in atmel_aes_get_cap()
2561 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2562 dd->caps.has_xts = 1; in atmel_aes_get_cap()
2563 dd->caps.has_authenc = 1; in atmel_aes_get_cap()
2564 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2567 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2568 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2569 dd->caps.has_ctr32 = 1; in atmel_aes_get_cap()
2570 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2571 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2574 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2575 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2576 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2581 dev_warn(dd->dev, in atmel_aes_get_cap()
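
atmel_aes_get_cap() (lines 2545 onward) starts from the most conservative feature set and then enables capabilities according to the IP revision read from AES_HW_VERSION, masked to its upper bits; newer revisions add dual-buffer mode, CFB64, 32-bit CTR, GCM, XTS and the combined authenc path, plus a larger DMA burst. The case values below are placeholders and the warning text is paraphrased (neither appears in the matches); the overall structure follows the listing.

	/* Sketch of the capability table; the version case values are hypothetical. */
	static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
	{
		/* Default to the minimal feature set of the oldest supported IP. */
		dd->caps.has_dualbuff = 0;
		dd->caps.has_cfb64 = 0;
		dd->caps.has_ctr32 = 0;
		dd->caps.has_gcm = 0;
		dd->caps.has_xts = 0;
		dd->caps.has_authenc = 0;
		dd->caps.max_burst_size = 1;

		switch (dd->hw_version & 0xff0) {
		case 0x500:				/* placeholder: newest revision, full feature set */
			dd->caps.has_dualbuff = 1;
			dd->caps.has_cfb64 = 1;
			dd->caps.has_ctr32 = 1;
			dd->caps.has_gcm = 1;
			dd->caps.has_xts = 1;
			dd->caps.has_authenc = 1;
			dd->caps.max_burst_size = 4;
			break;
		case 0x200:				/* placeholder: adds GCM and 32-bit CTR */
			dd->caps.has_dualbuff = 1;
			dd->caps.has_cfb64 = 1;
			dd->caps.has_ctr32 = 1;
			dd->caps.has_gcm = 1;
			dd->caps.max_burst_size = 4;
			break;
		case 0x130:				/* placeholder: dual buffer and CFB64 only */
			dd->caps.has_dualbuff = 1;
			dd->caps.has_cfb64 = 1;
			dd->caps.max_burst_size = 4;
			break;
		default:
			dev_warn(dd->dev,
				 "unknown AES IP version, using minimal capabilities\n");
			break;
		}
	}
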