/Linux-v4.19/drivers/crypto/ccp/ |
D | ccp-crypto-sha.c |
    32  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);  in ccp_sha_complete() local
    38  if (rctx->hash_rem) {  in ccp_sha_complete()
    40  unsigned int offset = rctx->nbytes - rctx->hash_rem;  in ccp_sha_complete()
    42  scatterwalk_map_and_copy(rctx->buf, rctx->src,  in ccp_sha_complete()
    43  offset, rctx->hash_rem, 0);  in ccp_sha_complete()
    44  rctx->buf_count = rctx->hash_rem;  in ccp_sha_complete()
    46  rctx->buf_count = 0;  in ccp_sha_complete()
    50  if (req->result && rctx->final)  in ccp_sha_complete()
    51  memcpy(req->result, rctx->ctx, digest_size);  in ccp_sha_complete()
    54  sg_free_table(&rctx->data_sg);  in ccp_sha_complete()
    [all …]
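The hits above are the canonical ahash driver pattern: per-request state lives in memory the crypto API reserves behind each `ahash_request` and is fetched with `ahash_request_ctx()`; on completion, any tail the engine did not consume is staged for the next update. A minimal sketch of that shape, with hypothetical names (`my_req_ctx`, the 128-byte buffer) rather than the CCP driver's actual definitions:

```c
#include <crypto/hash.h>
#include <crypto/scatterwalk.h>

struct my_req_ctx {			/* hypothetical per-request state */
	struct scatterlist *src;	/* caller's source data */
	unsigned int nbytes;		/* bytes submitted this round */
	unsigned int hash_rem;		/* tail the engine did not consume */
	unsigned int buf_count;		/* bytes staged for the next update */
	u8 buf[128];
};

static int my_sha_complete(struct ahash_request *req, int err)
{
	struct my_req_ctx *rctx = ahash_request_ctx(req);

	if (err)
		return err;

	if (rctx->hash_rem) {
		/* Save the unprocessed tail so the next update can
		 * prepend it - the same move as lines 40-44 above. */
		unsigned int offset = rctx->nbytes - rctx->hash_rem;

		scatterwalk_map_and_copy(rctx->buf, rctx->src,
					 offset, rctx->hash_rem, 0);
		rctx->buf_count = rctx->hash_rem;
	} else {
		rctx->buf_count = 0;
	}

	return 0;
}
```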
|
D | ccp-crypto-aes-cmac.c |
    31  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);  in ccp_aes_cmac_complete() local
    37  if (rctx->hash_rem) {  in ccp_aes_cmac_complete()
    39  unsigned int offset = rctx->nbytes - rctx->hash_rem;  in ccp_aes_cmac_complete()
    41  scatterwalk_map_and_copy(rctx->buf, rctx->src,  in ccp_aes_cmac_complete()
    42  offset, rctx->hash_rem, 0);  in ccp_aes_cmac_complete()
    43  rctx->buf_count = rctx->hash_rem;  in ccp_aes_cmac_complete()
    45  rctx->buf_count = 0;  in ccp_aes_cmac_complete()
    49  if (req->result && rctx->final)  in ccp_aes_cmac_complete()
    50  memcpy(req->result, rctx->iv, digest_size);  in ccp_aes_cmac_complete()
    53  sg_free_table(&rctx->data_sg);  in ccp_aes_cmac_complete()
    [all …]
|
D | ccp-crypto-aes-galois.c |
    71  struct ccp_aes_req_ctx *rctx = aead_request_ctx(req);  in ccp_aes_gcm_crypt() local
    96  memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);  in ccp_aes_gcm_crypt()
    98  rctx->iv[i + GCM_AES_IV_SIZE] = 0;  in ccp_aes_gcm_crypt()
    99  rctx->iv[AES_BLOCK_SIZE - 1] = 1;  in ccp_aes_gcm_crypt()
    102  iv_sg = &rctx->iv_sg;  in ccp_aes_gcm_crypt()
    104  sg_init_one(iv_sg, rctx->iv, iv_len);  in ccp_aes_gcm_crypt()
    107  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_gcm_crypt()
    108  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_gcm_crypt()
    109  rctx->cmd.engine = CCP_ENGINE_AES;  in ccp_aes_gcm_crypt()
    110  rctx->cmd.u.aes.type = ctx->u.aes.type;  in ccp_aes_gcm_crypt()
    [all …]
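Lines 96-99 build the initial GCM counter block J0: the caller's 12-byte nonce followed by a 32-bit big-endian counter starting at 1, handed to the engine as a one-entry scatterlist. A sketch of just that construction (the helper name is hypothetical; the macros are the real v4.19 ones):

```c
#include <crypto/aes.h>		/* AES_BLOCK_SIZE (16) */
#include <crypto/gcm.h>		/* GCM_AES_IV_SIZE (12) */
#include <linux/scatterlist.h>
#include <linux/string.h>

/* Hypothetical helper: expand a 12-byte GCM nonce into the initial
 * counter block (J0 = nonce || 0x00000001) and expose it as one
 * scatterlist entry for the engine. */
static void gcm_build_j0(u8 iv[AES_BLOCK_SIZE], struct scatterlist *iv_sg,
			 const u8 *nonce)
{
	memcpy(iv, nonce, GCM_AES_IV_SIZE);
	memset(iv + GCM_AES_IV_SIZE, 0,
	       AES_BLOCK_SIZE - GCM_AES_IV_SIZE - 1);
	iv[AES_BLOCK_SIZE - 1] = 1;	/* GCM counter starts at 1 */

	sg_init_one(iv_sg, iv, AES_BLOCK_SIZE);
}
```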
|
D | ccp-crypto-aes.c |
    29  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_aes_complete() local
    35  memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_complete()
    73  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_aes_crypt() local
    91  memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);  in ccp_aes_crypt()
    92  iv_sg = &rctx->iv_sg;  in ccp_aes_crypt()
    94  sg_init_one(iv_sg, rctx->iv, iv_len);  in ccp_aes_crypt()
    97  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_crypt()
    98  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_crypt()
    99  rctx->cmd.engine = CCP_ENGINE_AES;  in ccp_aes_crypt()
    100  rctx->cmd.u.aes.type = ctx->u.aes.type;  in ccp_aes_crypt()
    [all …]
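Lines 35 and 91 are the two halves of the IV round-trip every CBC-capable ablkcipher driver performs: on submission the caller's IV (`req->info`) is stashed in the request context where DMA can reach it, and on completion the engine's output IV is copied back so a chained request continues correctly. A sketch under the (since-removed) v4.19 ablkcipher API, with hypothetical names; the DES3 and XTS entries below follow the same shape:

```c
#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

struct my_aes_req_ctx {			/* hypothetical request context */
	struct scatterlist iv_sg;
	u8 iv[AES_BLOCK_SIZE];
};

/* Submission half: capture the caller's IV where the engine can DMA it. */
static void my_aes_prep_iv(struct ablkcipher_request *req)
{
	struct my_aes_req_ctx *rctx = ablkcipher_request_ctx(req);

	memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);
	sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);
}

/* Completion half: hand the chaining IV back to the caller. */
static void my_aes_done_iv(struct ablkcipher_request *req)
{
	struct my_aes_req_ctx *rctx = ablkcipher_request_ctx(req);

	memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);
}
```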
|
D | ccp-crypto-des3.c |
    28  struct ccp_des3_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_des3_complete() local
    34  memcpy(req->info, rctx->iv, DES3_EDE_BLOCK_SIZE);  in ccp_des3_complete()
    81  struct ccp_des3_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_des3_crypt() local
    98  memcpy(rctx->iv, req->info, DES3_EDE_BLOCK_SIZE);  in ccp_des3_crypt()
    99  iv_sg = &rctx->iv_sg;  in ccp_des3_crypt()
    101  sg_init_one(iv_sg, rctx->iv, iv_len);  in ccp_des3_crypt()
    104  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_des3_crypt()
    105  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_des3_crypt()
    106  rctx->cmd.engine = CCP_ENGINE_DES3;  in ccp_des3_crypt()
    107  rctx->cmd.u.des3.type = ctx->u.des3.type;  in ccp_des3_crypt()
    [all …]
|
D | ccp-crypto-aes-xts.c |
    68  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_aes_xts_complete() local
    73  memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_xts_complete()
    112  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_aes_xts_crypt() local
    170  memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);  in ccp_aes_xts_crypt()
    171  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_xts_crypt()
    173  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_xts_crypt()
    174  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_xts_crypt()
    175  rctx->cmd.engine = CCP_ENGINE_XTS_AES_128;  in ccp_aes_xts_crypt()
    176  rctx->cmd.u.xts.type = CCP_AES_TYPE_128;  in ccp_aes_xts_crypt()
    177  rctx->cmd.u.xts.action = (encrypt) ? CCP_AES_ACTION_ENCRYPT  in ccp_aes_xts_crypt()
    [all …]
|
D | ccp-crypto-rsa.c |
    51  struct ccp_rsa_req_ctx *rctx = akcipher_request_ctx(req);  in ccp_rsa_complete() local
    56  req->dst_len = rctx->cmd.u.rsa.key_size >> 3;  in ccp_rsa_complete()
    72  struct ccp_rsa_req_ctx *rctx = akcipher_request_ctx(req);  in ccp_rsa_crypt() local
    75  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_rsa_crypt()
    76  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_rsa_crypt()
    77  rctx->cmd.engine = CCP_ENGINE_RSA;  in ccp_rsa_crypt()
    79  rctx->cmd.u.rsa.key_size = ctx->u.rsa.key_len; /* in bits */  in ccp_rsa_crypt()
    81  rctx->cmd.u.rsa.exp = &ctx->u.rsa.e_sg;  in ccp_rsa_crypt()
    82  rctx->cmd.u.rsa.exp_len = ctx->u.rsa.e_len;  in ccp_rsa_crypt()
    84  rctx->cmd.u.rsa.exp = &ctx->u.rsa.d_sg;  in ccp_rsa_crypt()
    [all …]
|
/Linux-v4.19/drivers/crypto/qce/ |
D | sha.c |
    42  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  in qce_ahash_done() local
    54  dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);  in qce_ahash_done()
    55  dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);  in qce_ahash_done()
    57  memcpy(rctx->digest, result->auth_iv, digestsize);  in qce_ahash_done()
    61  rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);  in qce_ahash_done()
    62  rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);  in qce_ahash_done()
    68  req->src = rctx->src_orig;  in qce_ahash_done()
    69  req->nbytes = rctx->nbytes_orig;  in qce_ahash_done()
    70  rctx->last_blk = false;  in qce_ahash_done()
    71  rctx->first_blk = false;  in qce_ahash_done()
    [all …]
|
D | ablkcipher.c |
    29  struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);  in qce_ablkcipher_done() local
    47  dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);  in qce_ablkcipher_done()
    48  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);  in qce_ablkcipher_done()
    50  sg_free_table(&rctx->dst_tbl);  in qce_ablkcipher_done()
    63  struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);  in qce_ablkcipher_async_req_handle() local
    73  rctx->iv = req->info;  in qce_ablkcipher_async_req_handle()
    74  rctx->ivsize = crypto_ablkcipher_ivsize(ablkcipher);  in qce_ablkcipher_async_req_handle()
    75  rctx->cryptlen = req->nbytes;  in qce_ablkcipher_async_req_handle()
    81  rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);  in qce_ablkcipher_async_req_handle()
    83  rctx->dst_nents = sg_nents_for_len(req->dst, req->nbytes);  in qce_ablkcipher_async_req_handle()
    [all …]
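Lines 81-83 count how many scatterlist entries cover exactly `req->nbytes` before the lists are DMA-mapped; `sg_nents_for_len()` returns a negative errno when the list is shorter than the requested length, and `dma_map_sg()` returns 0 on failure, so both results need checking. A hedged sketch of that count-then-map step (function name hypothetical):

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Count the entries covering exactly `len` bytes, then DMA-map them. */
static int map_src_for_dma(struct device *dev, struct scatterlist *sg,
			   unsigned int len, int *nents_out)
{
	int nents = sg_nents_for_len(sg, len);

	if (nents < 0)
		return nents;		/* list too short for len */

	if (!dma_map_sg(dev, sg, nents, DMA_TO_DEVICE))
		return -EIO;		/* mapping failed */

	*nents_out = nents;		/* needed again for dma_unmap_sg() */
	return 0;
}
```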
|
D | common.c |
    235  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  in qce_setup_regs_ahash() local
    246  if (!rctx->last_blk && req->nbytes % blocksize)  in qce_setup_regs_ahash()
    251  if (IS_CMAC(rctx->flags)) {  in qce_setup_regs_ahash()
    259  auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen);  in qce_setup_regs_ahash()
    262  if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {  in qce_setup_regs_ahash()
    263  u32 authkey_words = rctx->authklen / sizeof(u32);  in qce_setup_regs_ahash()
    265  qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);  in qce_setup_regs_ahash()
    270  if (IS_CMAC(rctx->flags))  in qce_setup_regs_ahash()
    273  if (rctx->first_blk)  in qce_setup_regs_ahash()
    274  memcpy(auth, rctx->digest, digestsize);  in qce_setup_regs_ahash()
    [all …]
|
/Linux-v4.19/crypto/ |
D | lrw.c |
    58  struct rctx {  struct
    151  struct rctx *rctx = skcipher_request_ctx(req);  in post_crypt() local
    152  be128 *buf = rctx->ext ?: rctx->buf;  in post_crypt()
    160  subreq = &rctx->subreq;  in post_crypt()
    177  rctx->left -= subreq->cryptlen;  in post_crypt()
    179  if (err || !rctx->left)  in post_crypt()
    182  rctx->dst = rctx->dstbuf;  in post_crypt()
    188  if (rctx->dst != sg) {  in post_crypt()
    189  rctx->dst[0] = *sg;  in post_crypt()
    190  sg_unmark_end(rctx->dst);  in post_crypt()
    [all …]
|
D | xts.c |
    41  struct rctx {  struct
    101  struct rctx *rctx = skcipher_request_ctx(req);  in post_crypt() local
    102  le128 *buf = rctx->ext ?: rctx->buf;  in post_crypt()
    110  subreq = &rctx->subreq;  in post_crypt()
    127  rctx->left -= subreq->cryptlen;  in post_crypt()
    129  if (err || !rctx->left)  in post_crypt()
    132  rctx->dst = rctx->dstbuf;  in post_crypt()
    138  if (rctx->dst != sg) {  in post_crypt()
    139  rctx->dst[0] = *sg;  in post_crypt()
    140  sg_unmark_end(rctx->dst);  in post_crypt()
    [all …]
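In both lrw.c and xts.c, `struct rctx` is the template's per-request scratch (the subrequest forwarded to the child cipher, buffers, walk state). The crypto API allocates it behind every `skcipher_request` once the template declares its size at tfm-init time, after which `skcipher_request_ctx()` returns it, which is exactly what line 151 (lrw) and line 101 (xts) do. A sketch of the reservation step, simplified to take the child cipher handle directly (the real templates obtain it from their spawn):

```c
#include <crypto/internal/skcipher.h>

struct my_rctx {			/* hypothetical template scratch */
	u8 buf[64];			/* tweak/whitening workspace */
	struct skcipher_request subreq;	/* must be last: flexible tail */
};

/* Reserve per-request room: our scratch plus whatever the child
 * cipher needs for the embedded subrequest. */
static int my_tmpl_init(struct crypto_skcipher *tfm,
			struct crypto_skcipher *child)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct my_rctx) +
					 crypto_skcipher_reqsize(child));
	return 0;
}
```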
|
D | chacha20poly1305.c |
    99  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  in poly_verify_tag() local
    100  u8 tag[sizeof(rctx->tag)];  in poly_verify_tag()
    103  req->assoclen + rctx->cryptlen,  in poly_verify_tag()
    105  if (crypto_memneq(tag, rctx->tag, sizeof(tag)))  in poly_verify_tag()
    112  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  in poly_copy_tag() local
    114  scatterwalk_map_and_copy(rctx->tag, req->dst,  in poly_copy_tag()
    115  req->assoclen + rctx->cryptlen,  in poly_copy_tag()
    116  sizeof(rctx->tag), 1);  in poly_copy_tag()
    128  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  in chacha_decrypt() local
    129  struct chacha_req *creq = &rctx->u.chacha;  in chacha_decrypt()
    [all …]
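`poly_verify_tag()` above shows the decrypt-side tag check: read the tag stored after AAD and ciphertext in `req->src`, then compare with `crypto_memneq()`, which runs in constant time so the position of a mismatching byte cannot leak through timing. A self-contained sketch of the same check (the tag length macro and function name are illustrative):

```c
#include <crypto/aead.h>
#include <crypto/algapi.h>	/* crypto_memneq() */
#include <crypto/scatterwalk.h>
#include <linux/errno.h>

#define MY_TAGLEN 16		/* Poly1305 digest size */

/* Read the tag that sits after AAD + ciphertext in req->src and
 * compare it in constant time with the one we computed. */
static int my_verify_tag(struct aead_request *req, const u8 *computed,
			 unsigned int cryptlen)
{
	u8 tag[MY_TAGLEN];

	scatterwalk_map_and_copy(tag, req->src,
				 req->assoclen + cryptlen, sizeof(tag), 0);

	return crypto_memneq(tag, computed, sizeof(tag)) ? -EBADMSG : 0;
}
```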
|
D | mcryptd.c |
    102  struct mcryptd_hash_request_ctx *rctx)  in mcryptd_enqueue_request() argument
    110  rctx->tag.cpu = smp_processor_id();  in mcryptd_enqueue_request()
    319  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  in mcryptd_hash_enqueue() local
    324  rctx->complete = req->base.complete;  in mcryptd_hash_enqueue()
    327  ret = mcryptd_enqueue_request(queue, &req->base, rctx);  in mcryptd_hash_enqueue()
    337  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  in mcryptd_hash_init() local
    338  struct ahash_request *desc = &rctx->areq;  in mcryptd_hash_init()
    345  rctx->complete, req_async);  in mcryptd_hash_init()
    347  rctx->out = req->result;  in mcryptd_hash_init()
    352  rctx->complete(&req->base, err);  in mcryptd_hash_init()
    [all …]
|
D | rmd256.c |
    235  struct rmd256_ctx *rctx = shash_desc_ctx(desc);  in rmd256_init() local
    237  rctx->byte_count = 0;  in rmd256_init()
    239  rctx->state[0] = RMD_H0;  in rmd256_init()
    240  rctx->state[1] = RMD_H1;  in rmd256_init()
    241  rctx->state[2] = RMD_H2;  in rmd256_init()
    242  rctx->state[3] = RMD_H3;  in rmd256_init()
    243  rctx->state[4] = RMD_H5;  in rmd256_init()
    244  rctx->state[5] = RMD_H6;  in rmd256_init()
    245  rctx->state[6] = RMD_H7;  in rmd256_init()
    246  rctx->state[7] = RMD_H8;  in rmd256_init()
    [all …]
|
D | rmd128.c |
    220  struct rmd128_ctx *rctx = shash_desc_ctx(desc);  in rmd128_init() local
    222  rctx->byte_count = 0;  in rmd128_init()
    224  rctx->state[0] = RMD_H0;  in rmd128_init()
    225  rctx->state[1] = RMD_H1;  in rmd128_init()
    226  rctx->state[2] = RMD_H2;  in rmd128_init()
    227  rctx->state[3] = RMD_H3;  in rmd128_init()
    229  memset(rctx->buffer, 0, sizeof(rctx->buffer));  in rmd128_init()
    237  struct rmd128_ctx *rctx = shash_desc_ctx(desc);  in rmd128_update() local
    238  const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);  in rmd128_update()
    240  rctx->byte_count += len;  in rmd128_update()
    [all …]
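Line 238's `avail` computation is the classic block-buffering idiom for a 64-byte-block hash: `byte_count & 0x3f` is the fill level of the partial block, so `avail` is the room left before the next compression call. A generic sketch of the update skeleton this implies (the context layout and `my_compress()` are hypothetical, not rmd128's actual code):

```c
#include <linux/string.h>
#include <linux/types.h>

struct my_md_ctx {			/* hypothetical digest state */
	u64 byte_count;
	u32 state[4];
	u8 buffer[64];			/* one compression block */
};

static void my_compress(struct my_md_ctx *ctx, const u8 *block)
{
	/* compression rounds omitted in this sketch */
}

/* Buffered update - the shape rmd128_update() follows: top up the
 * partial block, compress every full block, keep the tail. */
static void my_md_update(struct my_md_ctx *ctx, const u8 *data, size_t len)
{
	size_t fill = ctx->byte_count & 0x3f;		/* bytes buffered */
	size_t avail = sizeof(ctx->buffer) - fill;	/* room left */

	ctx->byte_count += len;

	if (len < avail) {			/* not enough for a block */
		memcpy(ctx->buffer + fill, data, len);
		return;
	}

	memcpy(ctx->buffer + fill, data, avail);	/* complete a block */
	my_compress(ctx, ctx->buffer);
	data += avail;
	len -= avail;

	while (len >= sizeof(ctx->buffer)) {		/* whole blocks */
		my_compress(ctx, data);
		data += sizeof(ctx->buffer);
		len -= sizeof(ctx->buffer);
	}

	memcpy(ctx->buffer, data, len);			/* stash the tail */
}
```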
|
/Linux-v4.19/drivers/crypto/bcm/ |
D | cipher.c |
    145  struct iproc_reqctx_s *rctx,  in spu_ablkcipher_rx_sg_create() argument
    151  struct iproc_ctx_s *ctx = rctx->ctx;  in spu_ablkcipher_rx_sg_create()
    155  rctx->gfp);  in spu_ablkcipher_rx_sg_create()
    162  sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);  in spu_ablkcipher_rx_sg_create()
    167  sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak,  in spu_ablkcipher_rx_sg_create()
    171  datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,  in spu_ablkcipher_rx_sg_create()
    172  rctx->dst_nents, chunksize);  in spu_ablkcipher_rx_sg_create()
    181  sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak, SPU_SUPDT_LEN);  in spu_ablkcipher_rx_sg_create()
    184  sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);  in spu_ablkcipher_rx_sg_create()
    186  memset(rctx->msg_buf.rx_stat, 0, SPU_RX_STATUS_LEN);  in spu_ablkcipher_rx_sg_create()
    [all …]
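The excerpt assembles the SPU's receive descriptor as a scatterlist: response header first, payload in the middle, status word last. A reduced sketch of that assembly with three fixed entries (buffer names and lengths are placeholders; the real function allocates and sizes the table dynamically):

```c
#include <linux/scatterlist.h>

/* Reduced sketch: three fixed entries instead of the driver's
 * dynamically sized table. */
static void build_rx_sg(struct scatterlist sg[3],
			void *resp_hdr, unsigned int hdr_len,
			void *payload, unsigned int data_len,
			void *rx_stat, unsigned int stat_len)
{
	sg_init_table(sg, 3);			/* zeroes + marks end entry */

	sg_set_buf(&sg[0], resp_hdr, hdr_len);	/* SPU response header */
	sg_set_buf(&sg[1], payload, data_len);	/* cipher output */
	sg_set_buf(&sg[2], rx_stat, stat_len);	/* trailing status word */
}
```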
|
/Linux-v4.19/drivers/crypto/stm32/ |
D | stm32-hash.c |
    268  struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);  in stm32_hash_write_ctrl() local
    275  switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {  in stm32_hash_write_ctrl()
    292  reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);  in stm32_hash_write_ctrl()
    294  if (rctx->flags & HASH_FLAGS_HMAC) {  in stm32_hash_write_ctrl()
    311  static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)  in stm32_hash_append_sg() argument
    315  while ((rctx->bufcnt < rctx->buflen) && rctx->total) {  in stm32_hash_append_sg()
    316  count = min(rctx->sg->length - rctx->offset, rctx->total);  in stm32_hash_append_sg()
    317  count = min(count, rctx->buflen - rctx->bufcnt);  in stm32_hash_append_sg()
    320  if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {  in stm32_hash_append_sg()
    321  rctx->sg = sg_next(rctx->sg);  in stm32_hash_append_sg()
    [all …]
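`stm32_hash_append_sg()` shows the double-`min()` clamp used when draining a scatterlist into a bounded staging buffer: each copy is limited by (a) what remains in the current sg entry and (b) the room left in the buffer, advancing via `sg_next()` when an entry is drained. A condensed sketch (context layout hypothetical; the real driver additionally skips zero-length entries, as lines 320-321 show):

```c
#include <crypto/scatterwalk.h>
#include <linux/kernel.h>	/* min() */
#include <linux/scatterlist.h>

struct my_hash_rctx {			/* hypothetical request context */
	struct scatterlist *sg;		/* current source entry */
	unsigned int offset;		/* consumed bytes within *sg */
	unsigned int total;		/* bytes still to pull in */
	unsigned int bufcnt, buflen;	/* staging fill level / capacity */
	u8 *buffer;
};

/* Drain the scatterlist into the staging buffer, one clamped copy at
 * a time. */
static void my_append_sg(struct my_hash_rctx *rctx)
{
	unsigned int count;

	while (rctx->bufcnt < rctx->buflen && rctx->total) {
		count = min(rctx->sg->length - rctx->offset, rctx->total);
		count = min(count, rctx->buflen - rctx->bufcnt);

		scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt,
					 rctx->sg, rctx->offset, count, 0);

		rctx->bufcnt += count;
		rctx->offset += count;
		rctx->total -= count;

		if (rctx->offset == rctx->sg->length) {
			rctx->sg = sg_next(rctx->sg);	/* entry drained */
			rctx->offset = 0;
		}
	}
}
```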
|
/Linux-v4.19/arch/x86/crypto/sha1-mb/ |
D | sha1_mb.c |
    95  static void req_ctx_init(struct mcryptd_hash_request_ctx *rctx,  in req_ctx_init() argument
    98  rctx->flag = HASH_UPDATE;  in req_ctx_init()
    373  static int sha1_mb_set_results(struct mcryptd_hash_request_ctx *rctx)  in sha1_mb_set_results() argument
    376  struct sha1_hash_ctx *sctx = ahash_request_ctx(&rctx->areq);  in sha1_mb_set_results()
    377  __be32 *dst = (__be32 *) rctx->out;  in sha1_mb_set_results()
    390  struct mcryptd_hash_request_ctx *rctx = *ret_rctx;  in sha_finish_walk() local
    394  while (!(rctx->flag & HASH_DONE)) {  in sha_finish_walk()
    395  nbytes = crypto_ahash_walk_done(&rctx->walk, 0);  in sha_finish_walk()
    401  if (crypto_ahash_walk_last(&rctx->walk)) {  in sha_finish_walk()
    402  rctx->flag |= HASH_DONE;  in sha_finish_walk()
    [all …]
|
/Linux-v4.19/arch/x86/crypto/sha256-mb/ |
D | sha256_mb.c |
    95  static void req_ctx_init(struct mcryptd_hash_request_ctx *rctx,  in req_ctx_init() argument
    98  rctx->flag = HASH_UPDATE;  in req_ctx_init()
    372  static int sha256_mb_set_results(struct mcryptd_hash_request_ctx *rctx)  in sha256_mb_set_results() argument
    375  struct sha256_hash_ctx *sctx = ahash_request_ctx(&rctx->areq);  in sha256_mb_set_results()
    376  __be32 *dst = (__be32 *) rctx->out;  in sha256_mb_set_results()
    389  struct mcryptd_hash_request_ctx *rctx = *ret_rctx;  in sha_finish_walk() local
    393  while (!(rctx->flag & HASH_DONE)) {  in sha_finish_walk()
    394  nbytes = crypto_ahash_walk_done(&rctx->walk, 0);  in sha_finish_walk()
    400  if (crypto_ahash_walk_last(&rctx->walk)) {  in sha_finish_walk()
    401  rctx->flag |= HASH_DONE;  in sha_finish_walk()
    [all …]
|
/Linux-v4.19/arch/x86/crypto/sha512-mb/ |
D | sha512_mb.c |
    95  static void req_ctx_init(struct mcryptd_hash_request_ctx *rctx,  in req_ctx_init() argument
    98  rctx->flag = HASH_UPDATE;  in req_ctx_init()
    401  static int sha512_mb_set_results(struct mcryptd_hash_request_ctx *rctx)  in sha512_mb_set_results() argument
    404  struct sha512_hash_ctx *sctx = ahash_request_ctx(&rctx->areq);  in sha512_mb_set_results()
    405  __be64 *dst = (__be64 *) rctx->out;  in sha512_mb_set_results()
    418  struct mcryptd_hash_request_ctx *rctx = *ret_rctx;  in sha_finish_walk() local
    422  while (!(rctx->flag & HASH_DONE)) {  in sha_finish_walk()
    423  nbytes = crypto_ahash_walk_done(&rctx->walk, 0);  in sha_finish_walk()
    429  if (crypto_ahash_walk_last(&rctx->walk)) {  in sha_finish_walk()
    430  rctx->flag |= HASH_DONE;  in sha_finish_walk()
    [all …]
|
/Linux-v4.19/drivers/crypto/rockchip/ |
D | rk3288_crypto_ahash.c |
    52  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);  in rk_ahash_reg_init() local
    72  CRYPTO_WRITE(dev, RK_CRYPTO_HASH_CTRL, rctx->mode |  in rk_ahash_reg_init()
    84  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);  in rk_ahash_init() local
    88  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  in rk_ahash_init()
    89  rctx->fallback_req.base.flags = req->base.flags &  in rk_ahash_init()
    92  return crypto_ahash_init(&rctx->fallback_req);  in rk_ahash_init()
    97  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);  in rk_ahash_update() local
    101  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  in rk_ahash_update()
    102  rctx->fallback_req.base.flags = req->base.flags &  in rk_ahash_update()
    104  rctx->fallback_req.nbytes = req->nbytes;  in rk_ahash_update()
    [all …]
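Lines 88-92 are the standard software-fallback pattern: the driver mirrors the caller's request onto a fallback request embedded in its own request context, bound to a software tfm, propagating only flags that may legally cross (here the MAY_SLEEP hint), then forwards the operation. A sketch with a hypothetical context layout:

```c
#include <crypto/hash.h>

struct my_ahash_rctx {
	/* Must be last: the fallback tfm's own reqsize lives behind it,
	 * which is why real drivers add crypto_ahash_reqsize(fallback)
	 * when declaring their request context size. */
	struct ahash_request fallback_req;
};

/* Forward an init to the software fallback, letting only the
 * MAY_SLEEP hint cross over from the caller's flags. */
static int my_ahash_init(struct ahash_request *req,
			 struct crypto_ahash *fallback_tfm)
{
	struct my_ahash_rctx *rctx = ahash_request_ctx(req);

	ahash_request_set_tfm(&rctx->fallback_req, fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}
```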
|
/Linux-v4.19/drivers/crypto/ |
D | sahara.c |
    557  struct sahara_aes_reqctx *rctx;  in sahara_aes_process() local
    571  rctx = ablkcipher_request_ctx(req);  in sahara_aes_process()
    573  rctx->mode &= FLAGS_MODE_MASK;  in sahara_aes_process()
    574  dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;  in sahara_aes_process()
    638  struct sahara_aes_reqctx *rctx = ablkcipher_request_ctx(req);  in sahara_aes_crypt() local
    651  rctx->mode = mode;  in sahara_aes_crypt()
    776  struct sahara_sha_reqctx *rctx)  in sahara_sha_init_hdr() argument
    780  hdr = rctx->mode;  in sahara_sha_init_hdr()
    782  if (rctx->first) {  in sahara_sha_init_hdr()
    789  if (rctx->last)  in sahara_sha_init_hdr()
    [all …]
|
D | omap-aes-gcm.c |
    47  struct omap_aes_reqctx *rctx;  in omap_aes_gcm_done_task() local
    51  rctx = aead_request_ctx(dd->aead_req);  in omap_aes_gcm_done_task()
    66  scatterwalk_map_and_copy(rctx->auth_tag,  in omap_aes_gcm_done_task()
    78  tag = (u8 *)rctx->auth_tag;  in omap_aes_gcm_done_task()
    229  struct omap_aes_reqctx *rctx;  in omap_aes_gcm_dma_out_callback() local
    238  rctx = aead_request_ctx(dd->aead_req);  in omap_aes_gcm_dma_out_callback()
    239  auth_tag = (u32 *)rctx->auth_tag;  in omap_aes_gcm_dma_out_callback()
    255  struct omap_aes_reqctx *rctx;  in omap_aes_gcm_handle_queue() local
    280  rctx = aead_request_ctx(req);  in omap_aes_gcm_handle_queue()
    283  rctx->dd = dd;  in omap_aes_gcm_handle_queue()
    [all …]
|
/Linux-v4.19/kernel/events/ |
D | callchain.c |
    153  static struct perf_callchain_entry *get_callchain_entry(int *rctx)  in get_callchain_entry() argument
    158  *rctx = get_recursion_context(this_cpu_ptr(callchain_recursion));  in get_callchain_entry()
    159  if (*rctx == -1)  in get_callchain_entry()
    169  (*rctx * perf_callchain_entry__sizeof()));  in get_callchain_entry()
    173  put_callchain_entry(int rctx)  argument
    175  put_recursion_context(this_cpu_ptr(callchain_recursion), rctx);  in put_callchain_entry()
    184  int rctx;  in get_perf_callchain() local
    186  entry = get_callchain_entry(&rctx);  in get_perf_callchain()
    187  if (rctx == -1)  in get_perf_callchain()
    230  put_callchain_entry(rctx);  in get_perf_callchain()
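Here `rctx` is not a crypto request context at all: `get_recursion_context()` returns a small index identifying the current execution context (task/softirq/hardirq/NMI) that both selects a slice of the per-CPU callchain buffer and acts as a reentrancy guard; -1 means a capture is already running at this level. A condensed sketch of how `get_perf_callchain()` brackets its work with the get/put pair (these helpers are file-local to kernel/events/callchain.c, not a public API):

```c
#include <linux/perf_event.h>

/* Sketch only: get_callchain_entry()/put_callchain_entry() are static
 * helpers inside kernel/events/callchain.c. */
static struct perf_callchain_entry *capture_callchain_sketch(void)
{
	struct perf_callchain_entry *entry;
	int rctx;

	entry = get_callchain_entry(&rctx);
	if (rctx == -1)			/* recursed into ourselves: refuse */
		return NULL;

	if (entry) {
		/* ... perf_callchain_kernel()/_user() fill entry ... */
	}

	put_callchain_entry(rctx);	/* release this context's slot */
	return entry;
}
```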
|