Excerpts from drivers/crypto/ccp/ccp-crypto-main.c (lines matching a search for "sg" and "micro"); elided code is marked with /* ... */ or noted in comments.

// SPDX-License-Identifier: GPL-2.0-only
/*
 * AMD Cryptographic Coprocessor (CCP) crypto API support
 *
 * Copyright (C) 2013,2017 Advanced Micro Devices, Inc.
 */

/* ... other includes elided ... */
#include "ccp-crypto.h"

MODULE_PARM_DESC(aes_disable, "Disable use of AES - any non-zero value");
MODULE_PARM_DESC(sha_disable, "Disable use of SHA - any non-zero value");
MODULE_PARM_DESC(des3_disable, "Disable use of 3DES - any non-zero value");
MODULE_PARM_DESC(rsa_disable, "Disable use of RSA - any non-zero value");
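
/* Each description above pairs with a module_param(<name>, uint, 0444)
 * declaration in this file, so the flags are read-only at runtime; a
 * non-zero value makes registration skip that algorithm family. Assuming
 * the usual ccp-crypto module name, e.g.: modprobe ccp_crypto aes_disable=1
 */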

/* in ccp_crypto_success(): for async submission, -EINPROGRESS and
 * -EBUSY (accepted but backlogged) both count as a successful hand-off.
 */
	if (err && (err != -EINPROGRESS) && (err != -EBUSY))
		return false;

	/* in ccp_crypto_cmd_complete(): in the walk for a later cmd with the same tfm */
		if (crypto_cmd->tfm != tmp->tfm)
			continue;

	/* Skip over this cmd if it is the next backlog cmd */
	if (req_queue.backlog == &crypto_cmd->entry)
		req_queue.backlog = crypto_cmd->entry.next;
	*backlog = container_of(req_queue.backlog,
				struct ccp_crypto_cmd, entry);
	req_queue.backlog = req_queue.backlog->next;
	/* Skip over this cmd if it is now the next backlog cmd */
	if (req_queue.backlog == &crypto_cmd->entry)
		req_queue.backlog = crypto_cmd->entry.next;

	/* Remove the cmd entry from the list of cmds */
	req_queue.cmd_count--;
	list_del(&crypto_cmd->entry);
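
/* Design note, per this file's own comments: cmds sharing a tfm are
 * serialized, so a cmd's completion is the point where the next same-tfm
 * cmd ("held") becomes submittable and where one backlogged cmd is promoted.
 */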

/* in ccp_crypto_complete() */
	struct crypto_async_request *req = crypto_cmd->req;
	struct ccp_ctx *ctx = crypto_tfm_ctx(req->tfm);

	if (err == -EINPROGRESS) {
		/* Only propagate the -EINPROGRESS if necessary */
		if (crypto_cmd->ret == -EBUSY) {
			crypto_cmd->ret = -EINPROGRESS;
			req->complete(req, -EINPROGRESS);
		}
		return;
	}

	/* Operation has completed - update the queue before invoking
	 * the completion callbacks and retrieve the next cmd (one with
	 * a matching tfm) that can be submitted to the CCP.
	 */
	held = ccp_crypto_cmd_complete(crypto_cmd, &backlog);
	if (backlog) {
		backlog->ret = -EINPROGRESS;
		backlog->req->complete(backlog->req, -EINPROGRESS);
	}

	/* Transition the state from -EBUSY to -EINPROGRESS first */
	if (crypto_cmd->ret == -EBUSY)
		req->complete(req, -EINPROGRESS);

	/* Completion callbacks */
	ret = err;
	if (ctx->complete)
		ret = ctx->complete(req, ret);
	req->complete(req, ret);

	/* Submit the next cmd: it is already queued, so it must be allowed
	 * to backlog rather than be lost. */
	while (held) {
		held->cmd->flags |= CCP_CMD_MAY_BACKLOG;
		ret = ccp_enqueue_cmd(held->cmd);
		if (ccp_crypto_success(ret))
			break;
		/* Error occurred - report it and move to the next held cmd */
		ctx = crypto_tfm_ctx(held->req->tfm);
		if (ctx->complete)
			ret = ctx->complete(held->req, ret);
		held->req->complete(held->req, ret);
		/* ... dequeue the next held cmd, promote one backlog entry: */
		if (backlog) {
			backlog->ret = -EINPROGRESS;
			backlog->req->complete(backlog->req, -EINPROGRESS);
		}
		/* ... kfree(held) and advance to the next held cmd ... */
	}
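
/* Hedged sketch of a ctx->complete hook as invoked above: it receives the
 * original async request plus the CCP result and can do per-algorithm
 * post-processing before the final completion runs (the real instances
 * live in the ccp-crypto-aes/sha files; this one is illustrative only).
 */
static int example_post_process(struct crypto_async_request *async_req,
				int ret)
{
	if (ret)
		return ret;	/* pass hardware errors through unchanged */

	/* e.g. copy an output IV back into the request here */

	return 0;
}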

/* in ccp_crypto_enqueue_cmd(): when the queue is at CCP_CRYPTO_MAX_QLEN */
	if (!(crypto_cmd->cmd->flags & CCP_CMD_MAY_BACKLOG)) {
		ret = -ENOSPC;
		goto e_lock;
	}

	/* A queued cmd with the same tfm blocks immediate submission */
		if (crypto_cmd->tfm != tmp->tfm)
			continue;

	ret = -EINPROGRESS;
	if (!active)
		ret = ccp_enqueue_cmd(crypto_cmd->cmd);

	if (req_queue.cmd_count >= CCP_CRYPTO_MAX_QLEN) {
		ret = -EBUSY;
		if (req_queue.backlog == &req_queue.cmds)
			req_queue.backlog = &crypto_cmd->entry;
	}
	crypto_cmd->ret = ret;
	list_add_tail(&crypto_cmd->entry, &req_queue.cmds);
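
/* Return contract of the fragments above: -ENOSPC when the queue is full
 * and the cmd may not backlog; -EINPROGRESS once queued (and, when no
 * same-tfm cmd was active, submitted); -EBUSY when queued at or beyond
 * the backlog watermark. ccp_crypto_complete() keys off these values.
 */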

/**
 * ccp_crypto_enqueue_request - queue a crypto async request for processing
 *				by the CCP
 */
int ccp_crypto_enqueue_request(struct crypto_async_request *req,
			       struct ccp_cmd *cmd)
{
	/* ... local declarations elided ... */

	gfp = req->flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL : GFP_ATOMIC;

	crypto_cmd = kzalloc(sizeof(*crypto_cmd), gfp);
	if (!crypto_cmd)
		return -ENOMEM;

	/* Save the tfm: req may already be invalid by the time the
	 * completion callback runs, so it must not be dereferenced there.
	 */
	crypto_cmd->cmd = cmd;
	crypto_cmd->req = req;
	crypto_cmd->tfm = req->tfm;

	cmd->callback = ccp_crypto_complete;
	cmd->data = crypto_cmd;

	if (req->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		cmd->flags |= CCP_CMD_MAY_BACKLOG;
	else
		cmd->flags &= ~CCP_CMD_MAY_BACKLOG;

	return ccp_crypto_enqueue_cmd(crypto_cmd);
}
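
/* Hedged caller sketch (not from this file): roughly how the CCP skcipher
 * glue hands an operation to the queue above. The request context with an
 * embedded ccp_cmd follows the shape of the ccp-crypto-*.c files; treat
 * the exact field setup as illustrative.
 */
static int example_aes_crypt(struct skcipher_request *req)
{
	struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);

	memset(&rctx->cmd, 0, sizeof(rctx->cmd));
	INIT_LIST_HEAD(&rctx->cmd.entry);
	rctx->cmd.engine = CCP_ENGINE_AES;
	/* ... fill in rctx->cmd.u.aes (type, mode, key, iv, src/dst) ... */

	return ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
}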

/* in ccp_crypto_sg_table_add(table, sg_add) */
	struct scatterlist *sg, *sg_last = NULL;

	/* Find the first unused entry in the pre-allocated table */
	for (sg = table->sgl; sg; sg = sg_next(sg))
		if (!sg_page(sg))
			break;
	if (WARN_ON(!sg))
		return NULL;

	/* Copy sg_add's pages into the free entries, tracking the last one */
	for (; sg && sg_add; sg = sg_next(sg), sg_add = sg_next(sg_add)) {
		sg_set_page(sg, sg_page(sg_add), sg_add->length,
			    sg_add->offset);
		sg_last = sg;
	}
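
/* Hedged usage sketch: callers pre-allocate a table sized for all the
 * pieces (sg_alloc_table() zeroes the entries, so every page is "unused"
 * above), append the pieces in order, then terminate the chain. The
 * variable names here are illustrative.
 */
	struct sg_table table;
	struct scatterlist *sg;

	if (sg_alloc_table(&table, nents_total, GFP_KERNEL))
		return -ENOMEM;

	sg = ccp_crypto_sg_table_add(&table, buffered_sg);  /* carried over */
	sg = ccp_crypto_sg_table_add(&table, req_src_sg);   /* new data */
	if (sg)
		sg_mark_end(sg);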

/* in ccp_unregister_algs(): each pair below runs inside its own
 * list_for_each_entry_safe() walk, which also kfree()s the entry.
 */
	crypto_unregister_ahash(&ahash_alg->alg);
	list_del(&ahash_alg->entry);

	crypto_unregister_skcipher(&ablk_alg->alg);
	list_del(&ablk_alg->entry);

	crypto_unregister_aead(&aead_alg->alg);
	list_del(&aead_alg->entry);

	crypto_unregister_akcipher(&akc_alg->alg);
	list_del(&akc_alg->entry);
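
/* The registration-side counterpart, ccp_register_algs(), checks the
 * aes_disable/sha_disable/des3_disable/rsa_disable parameters from the
 * top of this file before populating these lists.
 */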