/Linux-v5.4/arch/x86/crypto/glue_helper.c

     21  int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,          in glue_ecb_req_128bit() (argument)
     39  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,          in glue_ecb_req_128bit()
     41  for (i = 0; i < gctx->num_funcs; i++) {                              in glue_ecb_req_128bit()
     42  func_bytes = bsize * gctx->funcs[i].num_blocks;                      in glue_ecb_req_128bit()
     49  gctx->funcs[i].fn_u.ecb(ctx, dst, src);                              in glue_ecb_req_128bit()
     98  int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,  in glue_cbc_decrypt_req_128bit() (argument)
    117  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,          in glue_cbc_decrypt_req_128bit()
    125  for (i = 0; i < gctx->num_funcs; i++) {                              in glue_cbc_decrypt_req_128bit()
    126  num_blocks = gctx->funcs[i].num_blocks;                              in glue_cbc_decrypt_req_128bit()
    137  gctx->funcs[i].fn_u.cbc(ctx, dst, src);                              in glue_cbc_decrypt_req_128bit()
         [all …]
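
The hits at lines 41-49 are the glue layer's width-dispatch loop. A minimal user-space sketch of that loop, with trimmed stand-in types for common_glue_ctx (the kernel version additionally brackets wide passes with glue_fpu_begin()/glue_fpu_end(), keyed to fpu_blocks_limit as the line-39 hits show):

    #include <stddef.h>

    /* funcs[] is ordered from the widest SIMD routine down to the 1-block
     * scalar fallback; each entry consumes as many whole multiples of its
     * width as remain. */
    typedef void (*ecb_fn)(void *ctx, unsigned char *dst, const unsigned char *src);

    struct glue_func { unsigned int num_blocks; ecb_fn ecb; };
    struct glue_ctx  { unsigned int num_funcs; const struct glue_func *funcs; };

    static void ecb_walk(const struct glue_ctx *g, void *ctx, size_t bsize,
                         unsigned char *dst, const unsigned char *src,
                         size_t nbytes)
    {
        for (unsigned int i = 0; i < g->num_funcs && nbytes >= bsize; i++) {
            size_t func_bytes = bsize * g->funcs[i].num_blocks;

            while (nbytes >= func_bytes) {        /* widest routine first */
                g->funcs[i].ecb(ctx, dst, src);
                src += func_bytes;
                dst += func_bytes;
                nbytes -= func_bytes;
            }
        }
    }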

/Linux-v5.4/drivers/gpu/drm/radeon/atom.c

    186  struct atom_context *gctx = ctx->ctx;               in atom_get_src_int() (local)
    195  idx += gctx->reg_block;                             in atom_get_src_int()
    196  switch (gctx->io_mode) {                            in atom_get_src_int()
    198  val = gctx->card->reg_read(gctx->card, idx);        in atom_get_src_int()
    207  if (!(gctx->io_mode & 0x80)) {                      in atom_get_src_int()
    211  if (!gctx->iio[gctx->io_mode & 0x7F]) {             in atom_get_src_int()
    213  gctx->io_mode & 0x7F);                              in atom_get_src_int()
    217  atom_iio_execute(gctx,                              in atom_get_src_int()
    218  gctx->iio[gctx->io_mode & 0x7F],                    in atom_get_src_int()
    238  val = gctx->divmul[0];                              in atom_get_src_int()
         [all …]
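
These hits are the ATOM BIOS interpreter reading a source operand from a hardware register. A toy model of the io_mode dispatch they outline, with stand-in types, a stubbed IIO interpreter, and the ATOM_IO_MM value assumed to match the driver's atom.h:

    #include <stdint.h>
    #include <stdio.h>

    #define ATOM_IO_MM 0     /* assumed value; defined in the driver's atom.h */

    struct card_info {
        uint32_t (*reg_read)(struct card_info *card, uint32_t reg);
    };

    struct atom_context {
        struct card_info *card;
        int io_mode;
        uint32_t reg_block;       /* current register bank offset */
        uint16_t iio[128];        /* offsets of IIO programs, 0 = undefined */
    };

    static uint32_t iio_execute(struct atom_context *gctx, uint16_t base,
                                uint32_t idx)
    {
        (void)gctx; (void)base; (void)idx;  /* real bytecode interpreter elided */
        return 0;
    }

    /* MM mode reads straight through the card's MMIO hook; a mode with bit 7
     * set names one of 128 indirect-IO (IIO) programs by its low 7 bits. */
    static uint32_t atom_read_reg(struct atom_context *gctx, uint32_t idx)
    {
        idx += gctx->reg_block;
        if (gctx->io_mode == ATOM_IO_MM)
            return gctx->card->reg_read(gctx->card, idx);
        if (!(gctx->io_mode & 0x80)) {
            fprintf(stderr, "non-MM register read not implemented\n");
            return 0;
        }
        if (!gctx->iio[gctx->io_mode & 0x7F]) {
            fprintf(stderr, "undefined indirect IO read method %d\n",
                    gctx->io_mode & 0x7F);
            return 0;
        }
        return iio_execute(gctx, gctx->iio[gctx->io_mode & 0x7F], idx);
    }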

/Linux-v5.4/drivers/gpu/drm/amd/amdgpu/atom.c

    180  struct atom_context *gctx = ctx->ctx;               in atom_get_src_int() (local)
    189  idx += gctx->reg_block;                             in atom_get_src_int()
    190  switch (gctx->io_mode) {                            in atom_get_src_int()
    192  val = gctx->card->reg_read(gctx->card, idx);        in atom_get_src_int()
    201  if (!(gctx->io_mode & 0x80)) {                      in atom_get_src_int()
    205  if (!gctx->iio[gctx->io_mode & 0x7F]) {             in atom_get_src_int()
    207  gctx->io_mode & 0x7F);                              in atom_get_src_int()
    211  atom_iio_execute(gctx,                              in atom_get_src_int()
    212  gctx->iio[gctx->io_mode & 0x7F],                    in atom_get_src_int()
    232  val = gctx->divmul[0];                              in atom_get_src_int()
         [all …]
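
The amdgpu copy of atom.c carries the same atom_get_src_int() logic verbatim (each match sits exactly six lines earlier than in the radeon file), so the sketch above describes both drivers' ATOM interpreters.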

/Linux-v5.4/arch/x86/include/asm/crypto/glue_helper.h

    102  extern int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
    108  extern int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
    111  extern int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
    114  extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
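
For context on how these declarations are consumed, a sketch modeled on the serpent-AVX glue code in arch/x86/crypto (the 8-way routine names are illustrative, and the GLUE_FUNC_CAST spelling is assumed to match v5.4's glue_helper.h): the common_glue_ctx table lists implementations widest-first, ending with the 1-block scalar fallback, and the skcipher handler just forwards the request.

    static const struct common_glue_ctx serpent_enc = {
        .num_funcs = 2,
        .fpu_blocks_limit = 8,      /* enter FPU context for >= 8 blocks */

        .funcs = { {
            .num_blocks = 8,
            .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
        }, {
            .num_blocks = 1,
            .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
        } }
    };

    static int ecb_encrypt(struct skcipher_request *req)
    {
        return glue_ecb_req_128bit(&serpent_enc, req);
    }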

/Linux-v5.4/crypto/gcm.c

    233  struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;   in gcm_hash_len() (local)
    237  lengths.b = cpu_to_be64(gctx->cryptlen * 8);            in gcm_hash_len()
    250  struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;   in gcm_hash_len_continue() (local)
    252  return gctx->complete(req, flags);                      in gcm_hash_len_continue()
    295  struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;   in gcm_hash_crypt_continue() (local)
    298  remain = gcm_remain(gctx->cryptlen);                    in gcm_hash_crypt_continue()
    325  struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;   in gcm_hash_assoc_remain_continue() (local)
    327  if (gctx->cryptlen)                                     in gcm_hash_assoc_remain_continue()
    329  gctx->src, gctx->cryptlen, flags) ?:                    in gcm_hash_assoc_remain_continue()
    433  struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;   in gcm_encrypt_continue() (local)
         [all …]
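
The line-237 hit builds GCM's final GHASH block: len(A) || len(C), both 64-bit big-endian bit counts per NIST SP 800-38D. A small runnable restatement, where be64() stands in for the kernel's cpu_to_be64():

    #include <stdint.h>

    static void be64(uint8_t out[8], uint64_t v)
    {
        for (int i = 7; i >= 0; i--) {
            out[i] = (uint8_t)v;
            v >>= 8;
        }
    }

    static void gcm_len_block(uint8_t block[16],
                              uint64_t assoclen, uint64_t cryptlen)
    {
        be64(block,     assoclen * 8);   /* lengths.a */
        be64(block + 8, cryptlen * 8);   /* lengths.b, as at gcm.c line 237 */
    }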

/Linux-v5.4/drivers/crypto/mediatek/mtk-aes.c

    883  struct mtk_aes_gcm_ctx *gctx = mtk_aes_gcm_ctx_cast(ctx);         in mtk_aes_gcm_info_init() (local)
    893  info->cmd[cnt++] = AES_GCM_CMD3 | cpu_to_le32(gctx->textlen);     in mtk_aes_gcm_info_init()
    896  info->cmd[cnt++] = AES_GCM_CMD4 | cpu_to_le32(gctx->authsize);    in mtk_aes_gcm_info_init()
    899  info->cmd[cnt++] = AES_GCM_CMD5 | cpu_to_le32(gctx->authsize);    in mtk_aes_gcm_info_init()
    900  info->cmd[cnt++] = AES_GCM_CMD6 | cpu_to_le32(gctx->authsize);    in mtk_aes_gcm_info_init()
    960  struct mtk_aes_gcm_ctx *gctx = mtk_aes_gcm_ctx_cast(aes->ctx);    in mtk_aes_gcm_start() (local)
    972  aes->total = len + gctx->authsize;                                in mtk_aes_gcm_start()
    974  scatterwalk_map_and_copy(tag, req->dst, len, gctx->authsize, 1);  in mtk_aes_gcm_start()
    986  struct mtk_aes_gcm_ctx *gctx = mtk_aes_gcm_ctx_cast(ctx);         in mtk_aes_gcm_crypt() (local)
    996  gctx->textlen = req->cryptlen - (enc ? 0 : gctx->authsize);       in mtk_aes_gcm_crypt()
         [all …]
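
The length bookkeeping at line 996 is the one subtlety here: for AEAD decryption, req->cryptlen covers the ciphertext and the tag, so the engine is programmed with cryptlen minus authsize; encryption passes cryptlen through unchanged. An illustrative restatement:

    #include <stdint.h>

    static uint32_t gcm_text_len(uint32_t cryptlen, uint32_t authsize, int enc)
    {
        return cryptlen - (enc ? 0 : authsize);   /* gctx->textlen, line 996 */
    }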

/Linux-v5.4/net/sunrpc/auth_gss/gss_krb5_wrap.c

    587  gss_wrap_kerberos(struct gss_ctx *gctx, int offset,                          in gss_wrap_kerberos() (argument)
    590  struct krb5_ctx *kctx = gctx->internal_ctx_id;                               in gss_wrap_kerberos()
    606  gss_unwrap_kerberos(struct gss_ctx *gctx, int offset, struct xdr_buf *buf)   in gss_unwrap_kerberos() (argument)
    608  struct krb5_ctx *kctx = gctx->internal_ctx_id;                               in gss_unwrap_kerberos()
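
These four hits show the GSS mechanism-dispatch pattern: the mech-neutral gss_ctx keeps an opaque pointer that each mechanism casts back to its private state. A reduced sketch, with the struct bodies as stand-ins for the kernel's:

    struct gss_ctx  { void *internal_ctx_id; };
    struct krb5_ctx { int initiate; /* ... per-session keys, sequence state */ };

    static struct krb5_ctx *krb5_ctx_of(struct gss_ctx *gctx)
    {
        return gctx->internal_ctx_id;   /* as in gss_wrap_kerberos(), line 590 */
    }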

/Linux-v5.4/kernel/events/core.c

    10847  struct perf_event_context *gctx;                           in __perf_event_ctx_lock_double() (local)
    10851  gctx = READ_ONCE(group_leader->ctx);                       in __perf_event_ctx_lock_double()
    10852  if (!refcount_inc_not_zero(&gctx->refcount)) {             in __perf_event_ctx_lock_double()
    10858  mutex_lock_double(&gctx->mutex, &ctx->mutex);              in __perf_event_ctx_lock_double()
    10860  if (group_leader->ctx != gctx) {                           in __perf_event_ctx_lock_double()
    10862  mutex_unlock(&gctx->mutex);                                in __perf_event_ctx_lock_double()
    10863  put_ctx(gctx);                                             in __perf_event_ctx_lock_double()
    10867  return gctx;                                               in __perf_event_ctx_lock_double()
    10885  struct perf_event_context *ctx, *uninitialized_var(gctx);  in SYSCALL_DEFINE5()
    11120  gctx = __perf_event_ctx_lock_double(group_leader, ctx);    in SYSCALL_DEFINE5()
           [all …]
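
The __perf_event_ctx_lock_double() hits outline a lock-and-revalidate idiom: take a reference only while the refcount is non-zero, lock both mutexes, then confirm the leader still points at the same context; on a race, unwind and retry. A user-space analogue with illustrative names (the kernel also relies on RCU to keep a zero-ref context's memory alive during the check, and its mutex_lock_double() orders the two locks by address; both are glossed over here):

    #include <pthread.h>
    #include <stdatomic.h>

    struct ctx {
        atomic_int refcount;
        pthread_mutex_t mutex;
    };

    struct leader {
        struct ctx *_Atomic ctx;
    };

    static int ref_get_not_zero(atomic_int *rc)   /* refcount_inc_not_zero() */
    {
        int v = atomic_load(rc);
        while (v != 0)
            if (atomic_compare_exchange_weak(rc, &v, v + 1))
                return 1;
        return 0;
    }

    static struct ctx *lock_double(struct leader *l, struct ctx *other)
    {
        struct ctx *gctx;
    again:
        gctx = atomic_load(&l->ctx);
        if (!ref_get_not_zero(&gctx->refcount))
            goto again;

        pthread_mutex_lock(&gctx->mutex);
        pthread_mutex_lock(&other->mutex);

        if (atomic_load(&l->ctx) != gctx) {        /* leader moved: retry */
            pthread_mutex_unlock(&other->mutex);
            pthread_mutex_unlock(&gctx->mutex);
            atomic_fetch_sub(&gctx->refcount, 1);  /* put_ctx() analogue */
            goto again;
        }
        return gctx;
    }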

/Linux-v5.4/drivers/crypto/chelsio/chcr_algo.c

     114  static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)          in GCM_CTX() (argument)
     116  return gctx->ctx->gcm;                                                          in GCM_CTX()
     119  static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)  in AUTHENC_CTX() (argument)
     121  return gctx->ctx->authenc;                                                      in AUTHENC_CTX()
    3354  struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);                                   in chcr_gcm_setkey() (local)
    3405  memset(gctx->ghash_h, 0, AEAD_H_SIZE);                                          in chcr_gcm_setkey()
    3406  aes_encrypt(&aes, gctx->ghash_h, gctx->ghash_h);                                in chcr_gcm_setkey()
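
Lines 3405-3406 derive the GHASH hash subkey H = E_K(0^128) (NIST SP 800-38D) by encrypting the all-zero block in place with the session key. A sketch of the same step, where aes_encrypt_block() is a hypothetical one-block AES hook standing in for the kernel's aes_encrypt():

    #include <stdint.h>
    #include <string.h>

    #define AEAD_H_SIZE 16

    /* hypothetical hook; any one-block AES implementation can be dropped in */
    void aes_encrypt_block(const void *key, uint8_t *dst, const uint8_t *src);

    static void derive_ghash_h(const void *key, uint8_t ghash_h[AEAD_H_SIZE])
    {
        memset(ghash_h, 0, AEAD_H_SIZE);          /* 0^128 */
        aes_encrypt_block(key, ghash_h, ghash_h); /* H = E_K(0^128), in place */
    }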