Search results for the identifier "tctx" in Linux v5.4:

/Linux-v5.4/crypto/
tgr192.c
    462  static void tgr192_transform(struct tgr192_ctx *tctx, const u8 * data)  in tgr192_transform() argument
    472  a = aa = tctx->a;  in tgr192_transform()
    473  b = bb = tctx->b;  in tgr192_transform()
    474  c = cc = tctx->c;  in tgr192_transform()
    488  tctx->a = a;  in tgr192_transform()
    489  tctx->b = b;  in tgr192_transform()
    490  tctx->c = c;  in tgr192_transform()
    495  struct tgr192_ctx *tctx = shash_desc_ctx(desc);  in tgr192_init() local
    497  tctx->a = 0x0123456789abcdefULL;  in tgr192_init()
    498  tctx->b = 0xfedcba9876543210ULL;  in tgr192_init()
    [all …]

essiv.c
    68  struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);  in essiv_skcipher_setkey() local
    69  SHASH_DESC_ON_STACK(desc, tctx->hash);  in essiv_skcipher_setkey()
    73  crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);  in essiv_skcipher_setkey()
    74  crypto_skcipher_set_flags(tctx->u.skcipher,  in essiv_skcipher_setkey()
    77  err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);  in essiv_skcipher_setkey()
    79  crypto_skcipher_get_flags(tctx->u.skcipher) &  in essiv_skcipher_setkey()
    84  desc->tfm = tctx->hash;  in essiv_skcipher_setkey()
    89  crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);  in essiv_skcipher_setkey()
    90  crypto_cipher_set_flags(tctx->essiv_cipher,  in essiv_skcipher_setkey()
    93  err = crypto_cipher_setkey(tctx->essiv_cipher, salt,  in essiv_skcipher_setkey()
    [all …]

adiantum.c
    120  struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);  in adiantum_setkey() local
    132  crypto_skcipher_clear_flags(tctx->streamcipher, CRYPTO_TFM_REQ_MASK);  in adiantum_setkey()
    133  crypto_skcipher_set_flags(tctx->streamcipher,  in adiantum_setkey()
    136  err = crypto_skcipher_setkey(tctx->streamcipher, key, keylen);  in adiantum_setkey()
    138  crypto_skcipher_get_flags(tctx->streamcipher) &  in adiantum_setkey()
    145  crypto_skcipher_reqsize(tctx->streamcipher), GFP_KERNEL);  in adiantum_setkey()
    151  skcipher_request_set_tfm(&data->req, tctx->streamcipher);  in adiantum_setkey()
    163  crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);  in adiantum_setkey()
    164  crypto_cipher_set_flags(tctx->blockcipher,  in adiantum_setkey()
    167  err = crypto_cipher_setkey(tctx->blockcipher, keyp,  in adiantum_setkey()
    [all …]

vmac.c
    398  static void vhash_blocks(const struct vmac_tfm_ctx *tctx,  in vhash_blocks() argument
    402  const u64 *kptr = tctx->nhkey;  in vhash_blocks()
    403  const u64 pkh = tctx->polykey[0];  in vhash_blocks()
    404  const u64 pkl = tctx->polykey[1];  in vhash_blocks()
    432  struct vmac_tfm_ctx *tctx = crypto_shash_ctx(tfm);  in vmac_setkey() local
    443  err = crypto_cipher_setkey(tctx->cipher, key, keylen);  in vmac_setkey()
    449  for (i = 0; i < ARRAY_SIZE(tctx->nhkey); i += 2) {  in vmac_setkey()
    450  crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);  in vmac_setkey()
    451  tctx->nhkey[i] = be64_to_cpu(out[0]);  in vmac_setkey()
    452  tctx->nhkey[i+1] = be64_to_cpu(out[1]);  in vmac_setkey()
    [all …]

xxhash_generic.c
    23  struct xxhash64_tfm_ctx *tctx = crypto_shash_ctx(tfm);  in xxhash64_setkey() local
    25  if (keylen != sizeof(tctx->seed)) {  in xxhash64_setkey()
    29  tctx->seed = get_unaligned_le64(key);  in xxhash64_setkey()
    35  struct xxhash64_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);  in xxhash64_init() local
    38  xxh64_reset(&dctx->xxhstate, tctx->seed);  in xxhash64_init()
    65  struct xxhash64_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);  in xxhash64_digest() local
    67  put_unaligned_le64(xxh64(data, length, tctx->seed), out);  in xxhash64_digest()

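The xxhash_generic.c matches above show the naming convention these crypto/ files follow: tctx is the long-lived, per-transform (per-tfm) context returned by crypto_shash_ctx(), while per-request state lives in the shash_desc context (dctx). Below is a minimal sketch of that split, reconstructed around the match lines listed above; the struct layouts are inferred from the field names shown, not copied verbatim from the kernel tree.

#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <linux/xxhash.h>

/* Long-lived per-tfm data: written once by ->setkey(). */
struct xxhash64_tfm_ctx {
        u64 seed;
};

/* Per-request hashing state, carved out of the shash_desc. */
struct xxhash64_desc_ctx {
        struct xxh64_state xxhstate;
};

static int xxhash64_setkey(struct crypto_shash *tfm, const u8 *key,
                           unsigned int keylen)
{
        struct xxhash64_tfm_ctx *tctx = crypto_shash_ctx(tfm);

        if (keylen != sizeof(tctx->seed)) {
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        tctx->seed = get_unaligned_le64(key);
        return 0;
}

static int xxhash64_init(struct shash_desc *desc)
{
        /* The seed set on the tfm is picked up by every new request. */
        struct xxhash64_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct xxhash64_desc_ctx *dctx = shash_desc_ctx(desc);

        xxh64_reset(&dctx->xxhstate, tctx->seed);
        return 0;
}
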
xcbc.c
    89  struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);  in crypto_xcbc_digest_update() local
    91  struct crypto_cipher *tfm = tctx->child;  in crypto_xcbc_digest_update()
    135  struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);  in crypto_xcbc_digest_final() local
    137  struct crypto_cipher *tfm = tctx->child;  in crypto_xcbc_digest_final()
    139  u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1);  in crypto_xcbc_digest_final()

cmac.c
    122  struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);  in crypto_cmac_digest_update() local
    124  struct crypto_cipher *tfm = tctx->child;  in crypto_cmac_digest_update()
    168  struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);  in crypto_cmac_digest_final() local
    170  struct crypto_cipher *tfm = tctx->child;  in crypto_cmac_digest_final()
    172  u8 *consts = PTR_ALIGN((void *)tctx->ctx,  in crypto_cmac_digest_final()

ccm.c
    856  struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);  in crypto_cbcmac_digest_update() local
    858  struct crypto_cipher *tfm = tctx->child;  in crypto_cbcmac_digest_update()
    882  struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);  in crypto_cbcmac_digest_final() local
    884  struct crypto_cipher *tfm = tctx->child;  in crypto_cbcmac_digest_final()

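In the MAC templates above (xcbc.c, cmac.c and the cbcmac part of ccm.c), tctx additionally carries the spawned block cipher as tctx->child; each ->update()/->final() pulls it back out of the parent shash. A rough sketch of a cbcmac-style final step in that spirit follows; the context layouts (mac_tfm_ctx, mac_desc_ctx, the 16-byte block) are illustrative simplifications, not the exact kernel structures.

#include <crypto/internal/hash.h>
#include <linux/crypto.h>
#include <linux/string.h>

/* Per-tfm context of a cbcmac-style template: just the child cipher. */
struct mac_tfm_ctx {
        struct crypto_cipher *child;
};

/* Per-request context: length of the partial block plus running digest. */
struct mac_desc_ctx {
        unsigned int len;
        u8 dg[16];              /* one cipher block, AES-sized here for brevity */
};

static int mac_digest_final(struct shash_desc *pdesc, u8 *out)
{
        struct crypto_shash *parent = pdesc->tfm;
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(parent);
        struct mac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        struct crypto_cipher *tfm = tctx->child;
        unsigned int bs = crypto_shash_digestsize(parent);

        /* Encrypt a partially filled trailing block before emitting the MAC. */
        if (ctx->len)
                crypto_cipher_encrypt_one(tfm, ctx->dg, ctx->dg);

        memcpy(out, ctx->dg, bs);
        return 0;
}
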
/Linux-v5.4/drivers/crypto/rockchip/
rk3288_crypto_ahash.c
    166  struct rk_ahash_ctx *tctx = crypto_tfm_ctx(req->base.tfm);  in rk_ahash_digest() local
    167  struct rk_crypto_info *dev = tctx->dev;  in rk_ahash_digest()
    274  struct rk_ahash_ctx *tctx = crypto_tfm_ctx(tfm);  in rk_cra_hash_init() local
    282  tctx->dev = algt->dev;  in rk_cra_hash_init()
    283  tctx->dev->addr_vir = (void *)__get_free_page(GFP_KERNEL);  in rk_cra_hash_init()
    284  if (!tctx->dev->addr_vir) {  in rk_cra_hash_init()
    285  dev_err(tctx->dev->dev, "failed to kmalloc for addr_vir\n");  in rk_cra_hash_init()
    288  tctx->dev->start = rk_ahash_start;  in rk_cra_hash_init()
    289  tctx->dev->update = rk_ahash_crypto_rx;  in rk_cra_hash_init()
    290  tctx->dev->complete = rk_ahash_crypto_complete;  in rk_cra_hash_init()
    [all …]

/Linux-v5.4/drivers/crypto/mediatek/
mtk-sha.c
    153  static struct mtk_cryp *mtk_sha_find_dev(struct mtk_sha_ctx *tctx)  in mtk_sha_find_dev() argument
    159  if (!tctx->cryp) {  in mtk_sha_find_dev()
    164  tctx->cryp = cryp;  in mtk_sha_find_dev()
    166  cryp = tctx->cryp;  in mtk_sha_find_dev()
    173  tctx->id = cryp->rec;  in mtk_sha_find_dev()
    358  struct mtk_sha_ctx *tctx = crypto_tfm_ctx(req->base.tfm);  in mtk_sha_finish_hmac() local
    359  struct mtk_sha_hmac_ctx *bctx = tctx->base;  in mtk_sha_finish_hmac()
    375  struct mtk_sha_ctx *tctx = crypto_ahash_ctx(tfm);  in mtk_sha_init() local
    408  ctx->buffer = tctx->buf;  in mtk_sha_init()
    410  if (tctx->flags & SHA_FLAGS_HMAC) {  in mtk_sha_init()
    [all …]

/Linux-v5.4/drivers/crypto/
omap-sham.c
    304  struct omap_sham_ctx *tctx = crypto_ahash_ctx(tfm);  in omap_sham_copy_hash_omap4() local
    305  struct omap_sham_hmac_ctx *bctx = tctx->base;  in omap_sham_copy_hash_omap4()
    466  struct omap_sham_ctx *tctx = crypto_ahash_ctx(tfm);  in omap_sham_write_ctrl_omap4() local
    467  struct omap_sham_hmac_ctx *bctx = tctx->base;  in omap_sham_write_ctrl_omap4()
    924  struct omap_sham_ctx *tctx = crypto_ahash_ctx(tfm);  in omap_sham_init() local
    930  if (!tctx->dd) {  in omap_sham_init()
    935  tctx->dd = dd;  in omap_sham_init()
    937  dd = tctx->dd;  in omap_sham_init()
    981  if (tctx->flags & BIT(FLAGS_HMAC)) {  in omap_sham_init()
    983  struct omap_sham_hmac_ctx *bctx = tctx->base;  in omap_sham_init()
    [all …]

s5p-sss.c
    1490  struct s5p_hash_ctx *tctx = crypto_tfm_ctx(req->base.tfm);  in s5p_hash_enqueue() local
    1494  return s5p_hash_handle_queue(tctx->dd, req);  in s5p_hash_enqueue()
    1547  struct s5p_hash_ctx *tctx = crypto_tfm_ctx(req->base.tfm);  in s5p_hash_final_shash() local
    1550  return s5p_hash_shash_digest(tctx->fallback, req->base.flags,  in s5p_hash_final_shash()
    1628  struct s5p_hash_ctx *tctx = crypto_ahash_ctx(tfm);  in s5p_hash_init() local
    1630  ctx->dd = tctx->dd;  in s5p_hash_init()
    1638  dev_dbg(tctx->dd->dev, "init: digest size: %d\n",  in s5p_hash_init()
    1679  struct s5p_hash_ctx *tctx = crypto_tfm_ctx(tfm);  in s5p_hash_cra_init_alg() local
    1682  tctx->dd = s5p_dev;  in s5p_hash_cra_init_alg()
    1684  tctx->fallback = crypto_alloc_shash(alg_name, 0,  in s5p_hash_cra_init_alg()
    [all …]

img-hash.c
    619  struct img_hash_ctx *tctx = crypto_ahash_ctx(tfm);  in img_hash_digest() local
    626  if (!tctx->hdev) {  in img_hash_digest()
    631  tctx->hdev = hdev;  in img_hash_digest()
    634  hdev = tctx->hdev;  in img_hash_digest()
    667  err = img_hash_handle_queue(tctx->hdev, req);  in img_hash_digest()
    717  struct img_hash_ctx *tctx = crypto_tfm_ctx(tfm);  in img_hash_cra_exit() local
    719  crypto_free_ahash(tctx->fallback);  in img_hash_cra_exit()

atmel-sha.c
    404  static struct atmel_sha_dev *atmel_sha_find_dev(struct atmel_sha_ctx *tctx)  in atmel_sha_find_dev() argument
    410  if (!tctx->dd) {  in atmel_sha_find_dev()
    415  tctx->dd = dd;  in atmel_sha_find_dev()
    417  dd = tctx->dd;  in atmel_sha_find_dev()
    428  struct atmel_sha_ctx *tctx = crypto_ahash_ctx(tfm);  in atmel_sha_init() local
    430  struct atmel_sha_dev *dd = atmel_sha_find_dev(tctx);  in atmel_sha_init()
    1150  struct atmel_sha_ctx *tctx = crypto_tfm_ctx(req->base.tfm);  in atmel_sha_enqueue() local
    1151  struct atmel_sha_dev *dd = tctx->dd;  in atmel_sha_enqueue()
    2287  struct atmel_sha_ctx *tctx;  in atmel_sha_authenc_spawn() local
    2321  tctx = crypto_ahash_ctx(tfm);  in atmel_sha_authenc_spawn()
    [all …]

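The hardware drivers above (mtk-sha, omap-sham, s5p-sss, img-hash, atmel-sha) all use tctx to cache the driver's device handle: on first use the per-tfm context is bound to a device picked from a global list, and later requests reuse the cached pointer. A simplified sketch of that find-dev idiom follows, with purely hypothetical names (my_sha_dev, my_sha_drv and my_sha_find_dev are placeholders, not kernel symbols).

#include <linux/list.h>
#include <linux/spinlock.h>

/* Hypothetical device instance and per-tfm context, for illustration only. */
struct my_sha_dev {
        struct list_head list;
        /* MMIO base, DMA channels, request queue, ... */
};

struct my_sha_ctx {
        struct my_sha_dev *dd;          /* cached device, NULL until first use */
};

static struct {
        struct list_head dev_list;
        spinlock_t lock;
} my_sha_drv = {
        .dev_list = LIST_HEAD_INIT(my_sha_drv.dev_list),
        .lock     = __SPIN_LOCK_UNLOCKED(my_sha_drv.lock),
};

/* Bind the tfm context to a device once, then keep reusing that binding. */
static struct my_sha_dev *my_sha_find_dev(struct my_sha_ctx *tctx)
{
        struct my_sha_dev *dd = NULL, *tmp;

        spin_lock_bh(&my_sha_drv.lock);
        if (!tctx->dd) {
                list_for_each_entry(tmp, &my_sha_drv.dev_list, list) {
                        dd = tmp;
                        break;
                }
                tctx->dd = dd;
        } else {
                dd = tctx->dd;
        }
        spin_unlock_bh(&my_sha_drv.lock);

        return dd;
}
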
/Linux-v5.4/arch/arm64/crypto/
aes-glue.c
    898  struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);  in mac_update() local
    911  mac_do_update(&tctx->key, p, blocks, ctx->dg,  in mac_update()
    938  struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);  in cbcmac_final() local
    941  mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);  in cbcmac_final()
    950  struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);  in cmac_final() local
    952  u8 *consts = tctx->consts;  in cmac_final()
    959  mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);  in cmac_final()

/Linux-v5.4/drivers/gpu/drm/amd/amdgpu/
amdgpu_uvd.c
    477  struct ttm_operation_ctx tctx = { false, false };  in amdgpu_uvd_cs_pass1() local
    500  r = ttm_bo_validate(&bo->tbo, &bo->placement, &tctx);  in amdgpu_uvd_cs_pass1()