Lines Matching refs:actx (drivers/crypto/mxs-dcp.c)

168 static int mxs_dcp_start_dma(struct dcp_async_ctx *actx)  in mxs_dcp_start_dma()  argument
171 const int chan = actx->chan; in mxs_dcp_start_dma()
174 struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan]; in mxs_dcp_start_dma()
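The references above show that actx->chan does double duty: it is handed to mxs_dcp_start_dma() and it indexes the per-channel DMA descriptor inside the driver's coherent block (line 174). A simplified, hypothetical view of that lookup; the struct layout and the channel count below are assumptions for illustration, not a verbatim copy of the driver's definitions:

    #include <stdint.h>

    #define DCP_MAX_CHANS 4   /* assumption: one descriptor slot per hardware channel */

    /* Hypothetical stand-in for the hardware DMA descriptor. */
    struct dcp_dma_desc {
            uint32_t next_cmd_addr;
            uint32_t control0;
            uint32_t control1;
            uint32_t source;
            uint32_t destination;
            uint32_t size;
            uint32_t payload;
            uint32_t status;
    };

    /* Hypothetical coherent block: one descriptor per channel. */
    struct dcp_coherent_block {
            struct dcp_dma_desc desc[DCP_MAX_CHANS];
    };

    /* The pattern from line 174: the channel stored in the context selects the slot. */
    static struct dcp_dma_desc *chan_desc(struct dcp_coherent_block *coh, int chan)
    {
            return &coh->desc[chan];
    }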
213 static int mxs_dcp_run_aes(struct dcp_async_ctx *actx, in mxs_dcp_run_aes() argument
217 struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan]; in mxs_dcp_run_aes()
229 if (actx->fill % AES_BLOCK_SIZE) { in mxs_dcp_run_aes()
258 desc->size = actx->fill; in mxs_dcp_run_aes()
262 ret = mxs_dcp_start_dma(actx); in mxs_dcp_run_aes()
278 struct dcp_async_ctx *actx = crypto_tfm_ctx(arq->tfm); in mxs_dcp_aes_block_crypt() local
301 actx->fill = 0; in mxs_dcp_aes_block_crypt()
304 memcpy(key, actx->key, actx->key_len); in mxs_dcp_aes_block_crypt()
325 if (actx->fill + len > out_off) in mxs_dcp_aes_block_crypt()
326 clen = out_off - actx->fill; in mxs_dcp_aes_block_crypt()
330 memcpy(in_buf + actx->fill, src_buf, clen); in mxs_dcp_aes_block_crypt()
333 actx->fill += clen; in mxs_dcp_aes_block_crypt()
339 if (actx->fill == out_off || sg_is_last(src) || in mxs_dcp_aes_block_crypt()
341 ret = mxs_dcp_run_aes(actx, req, init); in mxs_dcp_aes_block_crypt()
347 last_out_len = actx->fill; in mxs_dcp_aes_block_crypt()
348 while (dst && actx->fill) { in mxs_dcp_aes_block_crypt()
354 actx->fill); in mxs_dcp_aes_block_crypt()
359 actx->fill -= rem; in mxs_dcp_aes_block_crypt()
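Lines 325-359 sketch the driver's bounce-buffer strategy: scatterlist data is copied into a fixed-size input buffer at offset actx->fill, and the accumulated block is submitted to the hardware whenever the buffer fills up or the last source segment has been consumed. A minimal, self-contained userspace model of that accumulate-and-flush loop; the buffer size, the flush() callback and all other names here are illustrative stand-ins, not the driver's API:

    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    #define BOUNCE_SZ 16   /* stand-in for the per-request output offset / DCP_BUF_SZ */

    struct ctx {
            unsigned char buf[BOUNCE_SZ];
            size_t fill;               /* mirrors actx->fill */
    };

    /* Stand-in for mxs_dcp_run_aes(): here we just report the chunk. */
    static void flush(struct ctx *c)
    {
            printf("submit %zu bytes\n", c->fill);
            c->fill = 0;
    }

    /* Copy one source segment, flushing whenever the bounce buffer is full. */
    static void feed(struct ctx *c, const unsigned char *src, size_t len, int last)
    {
            while (len) {
                    size_t clen = len;

                    if (c->fill + clen > BOUNCE_SZ)
                            clen = BOUNCE_SZ - c->fill;   /* clamp, as on line 326 */

                    memcpy(c->buf + c->fill, src, clen);  /* as on line 330 */
                    src += clen;
                    len -= clen;
                    c->fill += clen;

                    /* Submit when full, or once the final segment is exhausted. */
                    if (c->fill == BOUNCE_SZ || (last && !len))
                            flush(c);
            }
    }

    int main(void)
    {
            struct ctx c = { .fill = 0 };
            unsigned char seg1[10] = { 0 }, seg2[30] = { 0 };

            feed(&c, seg1, sizeof(seg1), 0);
            feed(&c, seg2, sizeof(seg2), 1);   /* 40 bytes total -> 16 + 16 + 8 */
            return 0;
    }

The driver additionally rejects a final fill that is not a multiple of AES_BLOCK_SIZE (the check on line 229); the model above omits that validation.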
451 struct dcp_async_ctx *actx = crypto_tfm_ctx(arq->tfm); in mxs_dcp_aes_enqueue() local
455 if (unlikely(actx->key_len != AES_KEYSIZE_128)) in mxs_dcp_aes_enqueue()
460 actx->chan = DCP_CHAN_CRYPTO; in mxs_dcp_aes_enqueue()
462 spin_lock(&sdcp->lock[actx->chan]); in mxs_dcp_aes_enqueue()
463 ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base); in mxs_dcp_aes_enqueue()
464 spin_unlock(&sdcp->lock[actx->chan]); in mxs_dcp_aes_enqueue()
466 wake_up_process(sdcp->thread[actx->chan]); in mxs_dcp_aes_enqueue()
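Lines 462-466 (and the same pattern again around lines 779-783 in dcp_sha_update_fx) show how a request reaches the hardware: the context picks a channel, the request is pushed onto that channel's crypto queue under the channel's spinlock, and the per-channel worker thread referenced through sdcp->thread[actx->chan] is woken to process it. A small, runnable userspace analogue of that producer/worker hand-off, using a pthread mutex and condition variable in place of the spinlock and wake_up_process(); every name below is a stand-in for illustration, not the driver's API:

    #include <pthread.h>
    #include <stdio.h>

    #define QUEUE_DEPTH 8

    /* Hypothetical per-channel queue, standing in for sdcp->queue[chan]. */
    struct chan_queue {
            pthread_mutex_t lock;      /* stands in for sdcp->lock[chan] */
            pthread_cond_t wake;       /* stands in for wake_up_process() */
            int work[QUEUE_DEPTH];     /* no overflow handling in this sketch */
            int head, tail, stop;
    };

    /* Producer side: the moral equivalent of mxs_dcp_aes_enqueue(). */
    static void enqueue(struct chan_queue *q, int req)
    {
            pthread_mutex_lock(&q->lock);
            q->work[q->tail++ % QUEUE_DEPTH] = req;
            pthread_mutex_unlock(&q->lock);
            pthread_cond_signal(&q->wake);
    }

    /* Consumer side: the moral equivalent of the per-channel worker thread. */
    static void *worker(void *arg)
    {
            struct chan_queue *q = arg;

            pthread_mutex_lock(&q->lock);
            for (;;) {
                    while (q->head == q->tail && !q->stop)
                            pthread_cond_wait(&q->wake, &q->lock);
                    if (q->head == q->tail && q->stop)
                            break;
                    int req = q->work[q->head++ % QUEUE_DEPTH];
                    pthread_mutex_unlock(&q->lock);
                    printf("processing request %d\n", req);   /* run the DMA here */
                    pthread_mutex_lock(&q->lock);
            }
            pthread_mutex_unlock(&q->lock);
            return NULL;
    }

    int main(void)
    {
            struct chan_queue q = {
                    .lock = PTHREAD_MUTEX_INITIALIZER,
                    .wake = PTHREAD_COND_INITIALIZER,
            };
            pthread_t thr;

            pthread_create(&thr, NULL, worker, &q);
            enqueue(&q, 1);
            enqueue(&q, 2);

            pthread_mutex_lock(&q.lock);
            q.stop = 1;
            pthread_mutex_unlock(&q.lock);
            pthread_cond_signal(&q.wake);
            pthread_join(thr, NULL);
            return 0;
    }

The split matters for the driver because the enqueue path may run in atomic context, while the descriptor programming and DMA completion wait happen in the woken thread.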
494 struct dcp_async_ctx *actx = crypto_ablkcipher_ctx(tfm); in mxs_dcp_aes_setkey() local
502 actx->key_len = len; in mxs_dcp_aes_setkey()
504 memcpy(actx->key, key, len); in mxs_dcp_aes_setkey()
513 crypto_sync_skcipher_clear_flags(actx->fallback, CRYPTO_TFM_REQ_MASK); in mxs_dcp_aes_setkey()
514 crypto_sync_skcipher_set_flags(actx->fallback, in mxs_dcp_aes_setkey()
517 ret = crypto_sync_skcipher_setkey(actx->fallback, key, len); in mxs_dcp_aes_setkey()
522 tfm->base.crt_flags |= crypto_sync_skcipher_get_flags(actx->fallback) & in mxs_dcp_aes_setkey()
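Lines 494-522 outline mxs_dcp_aes_setkey(): the key bytes and length are cached in the context for the hardware path, and the same key is mirrored into the sync skcipher fallback so that key sizes the DCP cannot handle (anything other than AES-128, per the check on line 455) still work in software. A condensed reconstruction pieced together from the referenced lines; the branching around the 128-bit case, the error handling, and the continuation lines ending in CRYPTO_TFM_REQ_MASK / CRYPTO_TFM_RES_MASK are filled in from the usual fallback-setkey pattern and are assumptions, not verbatim driver code:

    actx->key_len = len;
    memcpy(actx->key, key, len);            /* cached for the DCP hardware path */

    /* Mirror the caller's request flags onto the software fallback ... */
    crypto_sync_skcipher_clear_flags(actx->fallback, CRYPTO_TFM_REQ_MASK);
    crypto_sync_skcipher_set_flags(actx->fallback,
                                   tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

    /* ... program the same key into the fallback ... */
    ret = crypto_sync_skcipher_setkey(actx->fallback, key, len);

    /* ... and report the fallback's result flags back through the tfm. */
    tfm->base.crt_flags |= crypto_sync_skcipher_get_flags(actx->fallback) &
                           CRYPTO_TFM_RES_MASK;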
531 struct dcp_async_ctx *actx = crypto_tfm_ctx(tfm); in mxs_dcp_aes_fallback_init() local
538 actx->fallback = blk; in mxs_dcp_aes_fallback_init()
545 struct dcp_async_ctx *actx = crypto_tfm_ctx(tfm); in mxs_dcp_aes_fallback_exit() local
547 crypto_free_sync_skcipher(actx->fallback); in mxs_dcp_aes_fallback_exit()
559 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in mxs_dcp_run_sha() local
561 struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan]; in mxs_dcp_run_sha()
574 desc->control1 = actx->alg; in mxs_dcp_run_sha()
578 desc->size = actx->fill; in mxs_dcp_run_sha()
588 (actx->alg == MXS_DCP_CONTROL1_HASH_SELECT_SHA1) ? in mxs_dcp_run_sha()
603 ret = mxs_dcp_start_dma(actx); in mxs_dcp_run_sha()
621 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in dcp_sha_req_to_buf() local
645 if (actx->fill + len > DCP_BUF_SZ) in dcp_sha_req_to_buf()
646 clen = DCP_BUF_SZ - actx->fill; in dcp_sha_req_to_buf()
650 memcpy(in_buf + actx->fill, src_buf, clen); in dcp_sha_req_to_buf()
653 actx->fill += clen; in dcp_sha_req_to_buf()
659 if (len && actx->fill == DCP_BUF_SZ) { in dcp_sha_req_to_buf()
663 actx->fill = 0; in dcp_sha_req_to_buf()
680 actx->fill = 0; in dcp_sha_req_to_buf()
729 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in dcp_sha_init() local
737 memset(actx, 0, sizeof(*actx)); in dcp_sha_init()
740 actx->alg = MXS_DCP_CONTROL1_HASH_SELECT_SHA1; in dcp_sha_init()
742 actx->alg = MXS_DCP_CONTROL1_HASH_SELECT_SHA256; in dcp_sha_init()
744 actx->fill = 0; in dcp_sha_init()
745 actx->hot = 0; in dcp_sha_init()
746 actx->chan = DCP_CHAN_HASH_SHA; in dcp_sha_init()
748 mutex_init(&actx->mutex); in dcp_sha_init()
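Lines 729-748 cover nearly the whole of dcp_sha_init(): the context is zeroed, the hardware hash selector is chosen (SHA1 or SHA256, depending on which algorithm instance the tfm was created for), the bounce-buffer fill and the "hot" flag are reset, the hashing channel is selected, and the per-context mutex is initialised. Put back together; tfm_is_sha1 is a placeholder for the driver's actual check, which falls on lines not matched above:

    memset(actx, 0, sizeof(*actx));

    /* Select the engine: SHA1 if the tfm is the "sha1" instance, else SHA256. */
    if (tfm_is_sha1)
            actx->alg = MXS_DCP_CONTROL1_HASH_SELECT_SHA1;
    else
            actx->alg = MXS_DCP_CONTROL1_HASH_SELECT_SHA256;

    actx->fill = 0;
    actx->hot = 0;                     /* no hashing session started yet */
    actx->chan = DCP_CHAN_HASH_SHA;    /* all hashing goes through the SHA channel */

    mutex_init(&actx->mutex);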
759 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in dcp_sha_update_fx() local
770 mutex_lock(&actx->mutex); in dcp_sha_update_fx()
774 if (!actx->hot) { in dcp_sha_update_fx()
775 actx->hot = 1; in dcp_sha_update_fx()
779 spin_lock(&sdcp->lock[actx->chan]); in dcp_sha_update_fx()
780 ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base); in dcp_sha_update_fx()
781 spin_unlock(&sdcp->lock[actx->chan]); in dcp_sha_update_fx()
783 wake_up_process(sdcp->thread[actx->chan]); in dcp_sha_update_fx()
784 mutex_unlock(&actx->mutex); in dcp_sha_update_fx()
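Lines 759-784 show the update path taking the per-context mutex, doing first-update bookkeeping, and then enqueueing the request exactly as on the AES side. The "hot" flag on lines 774-775 marks the start of a hashing session: the very first update flips it so that the hardware receives an INIT for that block and plain UPDATEs afterwards. A short reconstruction of that branch; what happens inside it beyond setting the flag (here, rctx->init) is an assumption:

    if (!actx->hot) {
            actx->hot = 1;
            rctx->init = 1;    /* assumed: ask mxs_dcp_run_sha() for HASH_INIT on this block */
    }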
821 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in dcp_sha_import() local
825 memset(actx, 0, sizeof(struct dcp_async_ctx)); in dcp_sha_import()
827 memcpy(actx, &export->async_ctx, sizeof(struct dcp_async_ctx)); in dcp_sha_import()