/Linux-v5.10/crypto/

tcrypt.c
    46    #define ENCRYPT 1    macro
    164   if (enc == ENCRYPT)    in do_mult_aead_op()
    276   if (enc == ENCRYPT)    in test_mb_aead_speed()
    550   if (enc == ENCRYPT)    in test_aead_speed()
    1162  if (enc == ENCRYPT)    in do_mult_acipher_op()
    1262  if (enc == ENCRYPT)    in test_mb_skcipher_speed()
    1503  if (enc == ENCRYPT)    in test_skcipher_speed()
    2054  test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,    in do_test()
    2058  test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,    in do_test()
    2062  test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,    in do_test()
    [all …]

chacha_generic.c
    83    .encrypt = crypto_chacha_crypt,
    98    .encrypt = crypto_xchacha_crypt,
    113   .encrypt = crypto_xchacha_crypt,

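The ".encrypt = ..." initializers above are the per-algorithm handlers a cipher implementation plugs into struct skcipher_alg before registering it with the crypto API. A minimal sketch of that registration pattern follows; everything prefixed demo_ is a made-up placeholder for illustration, not code taken from chacha_generic.c.

    #include <crypto/internal/skcipher.h>
    #include <linux/module.h>

    /* Hypothetical handlers; a real driver supplies its own implementations. */
    static int demo_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
    {
            return 0;
    }

    static int demo_crypt(struct skcipher_request *req)
    {
            /* For a stream cipher the same routine often serves both directions. */
            return 0;
    }

    static struct skcipher_alg demo_alg = {
            .base.cra_name          = "demo(cipher)",   /* made-up name */
            .base.cra_driver_name   = "demo-generic",
            .base.cra_priority      = 100,
            .base.cra_blocksize     = 1,
            .base.cra_module        = THIS_MODULE,

            .min_keysize    = 32,
            .max_keysize    = 32,
            .ivsize         = 16,
            .setkey         = demo_setkey,
            .encrypt        = demo_crypt,   /* the ".encrypt = ..." hook seen above */
            .decrypt        = demo_crypt,
    };

    static int __init demo_init(void)
    {
            return crypto_register_skcipher(&demo_alg);
    }

    static void __exit demo_exit(void)
    {
            crypto_unregister_skcipher(&demo_alg);
    }

    module_init(demo_init);
    module_exit(demo_exit);
    MODULE_LICENSE("GPL");
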
/Linux-v5.10/drivers/crypto/

padlock-aes.c
    66    struct cword encrypt;    member
    134   ctx->cword.encrypt.rounds = 10 + (key_len - 16) / 4;    in aes_set_key()
    135   ctx->cword.decrypt.rounds = ctx->cword.encrypt.rounds;    in aes_set_key()
    136   ctx->cword.encrypt.ksize = (key_len - 16) / 8;    in aes_set_key()
    137   ctx->cword.decrypt.ksize = ctx->cword.encrypt.ksize;    in aes_set_key()
    144   ctx->cword.encrypt.keygen = 1;    in aes_set_key()
    155   if (&ctx->cword.encrypt == per_cpu(paes_last_cword, cpu) ||    in aes_set_key()
    310   padlock_reset_key(&ctx->cword.encrypt);    in padlock_aes_encrypt()
    311   ecb_crypt(in, out, ctx->E, &ctx->cword.encrypt, 1);    in padlock_aes_encrypt()
    312   padlock_store_cword(&ctx->cword.encrypt);    in padlock_aes_encrypt()
    [all …]

ixp4xx_crypto.c
    160   int encrypt;    member
    176   struct ix_sa_dir encrypt;    member
    350   if (req_ctx->encrypt) {    in finish_scattered_hmac()
    565   ret = init_sa_dir(&ctx->encrypt);    in init_tfm()
    570   free_sa_dir(&ctx->encrypt);    in init_tfm()
    590   free_sa_dir(&ctx->encrypt);    in exit_tfm()
    660   static int setup_auth(struct crypto_tfm *tfm, int encrypt, unsigned authsize,    in setup_auth() argument
    671   dir = encrypt ? &ctx->encrypt : &ctx->decrypt;    in setup_auth()
    696   if (!encrypt)    in setup_auth()
    735   static int setup_cipher(struct crypto_tfm *tfm, int encrypt,    in setup_cipher() argument
    [all …]

/Linux-v5.10/drivers/crypto/caam/

caamalg.c
    336   * AES GCM encrypt shared descriptor    in gcm_set_sh_desc()
    401   * RFC4106 encrypt shared descriptor    in rfc4106_set_sh_desc()
    469   * RFC4543 encrypt shared descriptor    in rfc4543_set_sh_desc()
    1057  bool all_contig, bool encrypt)    in init_aead_job() argument
    1069  sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;    in init_aead_job()
    1070  ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;    in init_aead_job()
    1106  if (encrypt)    in init_aead_job()
    1118  bool all_contig, bool encrypt)    in init_gcm_job() argument
    1127  init_aead_job(req, edesc, all_contig, encrypt);    in init_gcm_job()
    1132  if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))    in init_gcm_job()
    [all …]

caamalg_qi.c
    258   if (ctx->drv_ctx[ENCRYPT]) {    in aead_setkey()
    259   ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],    in aead_setkey()
    382   if (ctx->drv_ctx[ENCRYPT]) {    in gcm_setkey()
    383   ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],    in gcm_setkey()
    490   if (ctx->drv_ctx[ENCRYPT]) {    in rfc4106_setkey()
    491   ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],    in rfc4106_setkey()
    596   if (ctx->drv_ctx[ENCRYPT]) {    in rfc4543_setkey()
    597   ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],    in rfc4543_setkey()
    636   /* skcipher encrypt, decrypt shared descriptors */    in skcipher_setkey()
    643   if (ctx->drv_ctx[ENCRYPT]) {    in skcipher_setkey()
    [all …]

caamalg_qi2.c
    232   flc = &ctx->flc[ENCRYPT];    in aead_set_sh_desc()
    246   dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],    in aead_set_sh_desc()
    347   bool encrypt)    in aead_edesc_alloc() argument
    378   dst_len = src_len + (encrypt ? authsize : (-authsize));    in aead_edesc_alloc()
    423   (encrypt ? authsize : 0);    in aead_edesc_alloc()
    442   if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv)    in aead_edesc_alloc()
    503   * to skip it when we authenticate or encrypt...    in aead_edesc_alloc()
    544   (encrypt ? ctx->authsize : (-ctx->authsize));    in aead_edesc_alloc()
    596   flc = &ctx->flc[ENCRYPT];    in chachapoly_set_sh_desc()
    601   dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],    in chachapoly_set_sh_desc()
    [all …]

/Linux-v5.10/arch/sparc/crypto/

des_glue.c
    95    static int __ecb_crypt(struct skcipher_request *req, bool encrypt)    in __ecb_crypt() argument
    107   if (encrypt)    in __ecb_crypt()
    136   static int __cbc_crypt(struct skcipher_request *req, bool encrypt)    in __cbc_crypt() argument
    148   if (encrypt)    in __cbc_crypt()
    153   if (encrypt)    in __cbc_crypt()
    244   static int __ecb3_crypt(struct skcipher_request *req, bool encrypt)    in __ecb3_crypt() argument
    257   if (encrypt)    in __ecb3_crypt()
    290   static int __cbc3_crypt(struct skcipher_request *req, bool encrypt)    in __cbc3_crypt() argument
    303   if (encrypt)    in __cbc3_crypt()
    309   if (encrypt)    in __cbc3_crypt()
    [all …]

aes_glue.c
    36    void (*encrypt)(const u64 *key, const u32 *input, u32 *output);    member
    130   .encrypt = aes_sparc64_encrypt_128,
    142   .encrypt = aes_sparc64_encrypt_192,
    154   .encrypt = aes_sparc64_encrypt_256,
    209   ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);    in crypto_aes_encrypt()
    394   .encrypt = ecb_encrypt,
    408   .encrypt = cbc_encrypt,
    422   .encrypt = ctr_crypt,

/Linux-v5.10/net/ceph/

crypto.c
    150   * Currently these are encrypt out-buffer (ceph_buffer) and decrypt
    218   static int ceph_aes_crypt(const struct ceph_crypto_key *key, bool encrypt,    in ceph_aes_crypt() argument
    226   int crypt_len = encrypt ? in_len + pad_byte : in_len;    in ceph_aes_crypt()
    230   if (encrypt)    in ceph_aes_crypt()
    247   if (encrypt)    in ceph_aes_crypt()
    254   encrypt ? "en" : "de", ret);    in ceph_aes_crypt()
    262   if (encrypt) {    in ceph_aes_crypt()
    282   int ceph_crypt(const struct ceph_crypto_key *key, bool encrypt,    in ceph_crypt() argument
    290   return ceph_aes_crypt(key, encrypt, buf, buf_len, in_len,    in ceph_crypt()

/Linux-v5.10/drivers/crypto/virtio/

virtio_crypto_algs.c
    38    bool encrypt;    member
    66    /* Finish the encrypt or decrypt process */    in virtio_crypto_dataq_sym_callback()
    118   int encrypt)    in virtio_crypto_alg_skcipher_init_session() argument
    123   int op = encrypt ? VIRTIO_CRYPTO_OP_ENCRYPT : VIRTIO_CRYPTO_OP_DECRYPT;    in virtio_crypto_alg_skcipher_init_session()
    191   if (encrypt)    in virtio_crypto_alg_skcipher_init_session()
    206   int encrypt)    in virtio_crypto_alg_skcipher_close_session() argument
    225   if (encrypt)    in virtio_crypto_alg_skcipher_close_session()
    387   if (vc_sym_req->encrypt) {    in __virtio_crypto_skcipher_do_req()
    441   if (!vc_sym_req->encrypt)    in __virtio_crypto_skcipher_do_req()
    502   vc_sym_req->encrypt = true;    in virtio_crypto_skcipher_encrypt()
    [all …]

/Linux-v5.10/fs/crypto/

crypto.c
    92    /* Encrypt or decrypt a single filesystem block of file contents */
    140   * fscrypt_encrypt_pagecache_blocks() - Encrypt filesystem blocks from a
    142   * @page: The locked pagecache page containing the block(s) to encrypt
    143   * @len: Total size of the block(s) to encrypt. Must be a nonzero
    145   * @offs: Byte offset within @page of the first block to encrypt. Must be
    205   * fscrypt_encrypt_block_inplace() - Encrypt a filesystem block in-place
    207   * @page: The page containing the block to encrypt
    208   * @len: Size of block to encrypt. Doesn't need to be a multiple of the
    210   * @offs: Byte offset within @page at which the block to encrypt begins
    215   * Encrypt a possibly-compressed filesystem block that is located in an

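The kerneldoc fragments above describe fscrypt_encrypt_pagecache_blocks() and fscrypt_encrypt_block_inplace(). As a rough sketch of how a filesystem write path might call the former, encrypting a whole locked pagecache page into a bounce page; the helper name and whole-page parameters are illustrative assumptions, not taken from any particular filesystem:

    #include <linux/fscrypt.h>
    #include <linux/mm.h>
    #include <linux/err.h>

    static struct page *demo_encrypt_for_writeback(struct page *page)
    {
            /*
             * Encrypt the whole (locked) pagecache page into a freshly
             * allocated bounce page; the pagecache plaintext stays intact.
             * The caller checks the result with IS_ERR() and submits the
             * bounce page, not @page, for I/O.
             */
            return fscrypt_encrypt_pagecache_blocks(page, PAGE_SIZE, 0,
                                                    GFP_NOFS);
    }
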
/Linux-v5.10/drivers/crypto/cavium/nitrox/

nitrox_skcipher.c
    92    if (nkreq->creq.ctrl.s.arg == ENCRYPT) {    in nitrox_cbc_cipher_callback()
    264   creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);    in nitrox_skcipher_crypt()
    401   .encrypt = nitrox_aes_encrypt,
    420   .encrypt = nitrox_aes_encrypt,
    439   .encrypt = nitrox_aes_encrypt,
    458   .encrypt = nitrox_aes_encrypt,
    479   .encrypt = nitrox_aes_encrypt,
    496   .encrypt = nitrox_aes_encrypt,
    515   .encrypt = nitrox_3des_encrypt,
    534   .encrypt = nitrox_3des_encrypt,

/Linux-v5.10/drivers/crypto/ccree/

cc_cipher.c
    895   "Encrypt" : "Decrypt"), req, iv, nbytes);    in cc_cipher_process()
    1013  .encrypt = cc_cipher_encrypt,
    1031  .encrypt = cc_cipher_encrypt,
    1049  .encrypt = cc_cipher_encrypt,
    1067  .encrypt = cc_cipher_encrypt,
    1085  .encrypt = cc_cipher_encrypt,
    1103  .encrypt = cc_cipher_encrypt,
    1121  .encrypt = cc_cipher_encrypt,
    1143  .encrypt = cc_cipher_encrypt,
    1160  .encrypt = cc_cipher_encrypt,
    [all …]

/Linux-v5.10/drivers/crypto/inside-secure/

safexcel_cipher.c
    655   /* For encrypt take the last output word */    in safexcel_handle_req_result()
    694   * AEAD has auth tag appended to output for encrypt and    in safexcel_send_req()
    1276  .encrypt = safexcel_encrypt,
    1313  .encrypt = safexcel_encrypt,
    1351  .encrypt = safexcel_encrypt,
    1389  .encrypt = safexcel_encrypt,
    1463  .encrypt = safexcel_encrypt,
    1525  .encrypt = safexcel_encrypt,
    1564  .encrypt = safexcel_encrypt,
    1624  .encrypt = safexcel_encrypt,
    [all …]

/Linux-v5.10/include/crypto/

skcipher.h
    17    * @cryptlen: Number of bytes to encrypt or decrypt
    69    * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
    80    * @decrypt: Decrypt a single block. This is a reverse counterpart to @encrypt
    94    * IV of exactly that size to perform the encrypt or decrypt operation.
    107   int (*encrypt)(struct skcipher_request *req);    member
    407   * crypto_skcipher_encrypt() - encrypt plaintext
    411   * Encrypt plaintext data using the skcipher_request handle. That data
    486   * encrypt and decrypt API calls. During the allocation, the provided skcipher

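The skcipher.h excerpts above cover both sides of the API: the @encrypt callback a driver implements and the crypto_skcipher_encrypt() helper a user calls on a skcipher_request. A minimal synchronous-usage sketch, assuming a "cbc(aes)" transform and a block-aligned in-place buffer; the demo_ wrapper itself is hypothetical:

    #include <crypto/skcipher.h>
    #include <linux/crypto.h>
    #include <linux/scatterlist.h>
    #include <linux/err.h>

    static int demo_skcipher_encrypt(u8 *buf, unsigned int len,
                                     const u8 *key, unsigned int keylen, u8 *iv)
    {
            struct crypto_skcipher *tfm;
            struct skcipher_request *req;
            struct scatterlist sg;
            DECLARE_CRYPTO_WAIT(wait);
            int err;

            tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);

            err = crypto_skcipher_setkey(tfm, key, keylen);
            if (err)
                    goto out_free_tfm;

            req = skcipher_request_alloc(tfm, GFP_KERNEL);
            if (!req) {
                    err = -ENOMEM;
                    goto out_free_tfm;
            }

            sg_init_one(&sg, buf, len);
            skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                          CRYPTO_TFM_REQ_MAY_SLEEP,
                                          crypto_req_done, &wait);
            /* @cryptlen (len) must be a multiple of the cipher block size here. */
            skcipher_request_set_crypt(req, &sg, &sg, len, iv);

            /* Fold the possibly asynchronous completion into a synchronous wait. */
            err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

            skcipher_request_free(req);
    out_free_tfm:
            crypto_free_skcipher(tfm);
            return err;
    }
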
akcipher.h
    62    * @encrypt: Function performs an encrypt operation as defined by public key
    95    int (*encrypt)(struct akcipher_request *req);    member
    276   * crypto_akcipher_encrypt() - Invoke public key encrypt operation
    278   * Function invokes the specific public key encrypt operation for a given
    294   ret = alg->encrypt(req);    in crypto_akcipher_encrypt()

/Linux-v5.10/lib/crypto/

chacha20poly1305.c
    213   int encrypt)    in chacha20poly1305_crypt_sg_inplace() argument
    262   if (!encrypt)    in chacha20poly1305_crypt_sg_inplace()
    292   if (encrypt)    in chacha20poly1305_crypt_sg_inplace()
    305   if (encrypt) {    in chacha20poly1305_crypt_sg_inplace()
    321   scatterwalk_map_and_copy(b.mac[encrypt], src, src_len,    in chacha20poly1305_crypt_sg_inplace()
    322   sizeof(b.mac[1]), encrypt);    in chacha20poly1305_crypt_sg_inplace()
    323   ret = encrypt ||    in chacha20poly1305_crypt_sg_inplace()

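chacha20poly1305_crypt_sg_inplace() is the scatterlist backend behind this library's simpler linear interface. A small sketch of that linear interface; the wrapper names are illustrative, and the buffer-size comments reflect the 16-byte Poly1305 tag the encrypt call appends:

    #include <crypto/chacha20poly1305.h>

    /* dst must have room for plain_len + CHACHA20POLY1305_AUTHTAG_SIZE bytes. */
    static void demo_seal(u8 *dst, const u8 *plain, size_t plain_len,
                          const u8 *ad, size_t ad_len, u64 nonce,
                          const u8 key[CHACHA20POLY1305_KEY_SIZE])
    {
            /* dst receives ciphertext followed by the 16-byte authentication tag. */
            chacha20poly1305_encrypt(dst, plain, plain_len, ad, ad_len,
                                     nonce, key);
    }

    /* ct_len includes the trailing tag; dst needs ct_len - 16 bytes. */
    static bool demo_open(u8 *dst, const u8 *ct, size_t ct_len,
                          const u8 *ad, size_t ad_len, u64 nonce,
                          const u8 key[CHACHA20POLY1305_KEY_SIZE])
    {
            /* Returns false when the authentication tag does not verify. */
            return chacha20poly1305_decrypt(dst, ct, ct_len, ad, ad_len,
                                            nonce, key);
    }
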
/Linux-v5.10/arch/arm/crypto/

chacha-glue.c
    208   .encrypt = chacha_arm,
    223   .encrypt = xchacha_arm,
    238   .encrypt = xchacha_arm,
    258   .encrypt = chacha_neon,
    274   .encrypt = xchacha_neon,
    290   .encrypt = xchacha_neon,

aes-neonbs-glue.c
    340   static int __xts_crypt(struct skcipher_request *req, bool encrypt,    in __xts_crypt() argument
    373   int reorder_last_tweak = !encrypt && tail > 0;    in __xts_crypt()
    400   if (encrypt)    in __xts_crypt()
    435   .encrypt = ecb_encrypt,
    451   .encrypt = cbc_encrypt,
    470   .encrypt = ctr_encrypt,
    486   .encrypt = ctr_encrypt_sync,
    502   .encrypt = xts_encrypt,

/Linux-v5.10/drivers/crypto/ccp/

ccp-crypto-aes.c
    65    static int ccp_aes_crypt(struct skcipher_request *req, bool encrypt)    in ccp_aes_crypt() argument
    98    (encrypt) ? CCP_AES_ACTION_ENCRYPT : CCP_AES_ACTION_DECRYPT;    in ccp_aes_crypt()
    160   static int ccp_aes_rfc3686_crypt(struct skcipher_request *req, bool encrypt)    in ccp_aes_rfc3686_crypt() argument
    181   return ccp_aes_crypt(req, encrypt);    in ccp_aes_rfc3686_crypt()
    208   .encrypt = ccp_aes_encrypt,
    226   .encrypt = ccp_aes_rfc3686_encrypt,

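ccp_aes_crypt(req, bool encrypt) illustrates a pattern that recurs throughout this listing: one shared helper takes the direction as a flag, and the registered .encrypt/.decrypt entry points are thin wrappers around it. A generic sketch of that pattern, not the verbatim ccp code:

    #include <crypto/internal/skcipher.h>

    /* Shared worker; the body here is a placeholder for the real command setup. */
    static int demo_aes_crypt(struct skcipher_request *req, bool encrypt)
    {
            /* Build and submit the operation for the chosen direction. */
            (void)encrypt;
            return 0;
    }

    /* The registered entry points reduce to one-line wrappers. */
    static int demo_aes_encrypt(struct skcipher_request *req)
    {
            return demo_aes_crypt(req, true);
    }

    static int demo_aes_decrypt(struct skcipher_request *req)
    {
            return demo_aes_crypt(req, false);
    }
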
/Linux-v5.10/drivers/staging/rtl8192u/ieee80211/

ieee80211_crypt.h
    36    /* encrypt/decrypt return < 0 on error or >= 0 on success. The return
    60    /* maximum number of bytes added by encryption; encrypt buf is
    62    * extra_postfix_len; encrypt need not use all this space, but

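The header comments above spell out the contract for this staging driver's crypto ops: encrypt/decrypt return negative on error, and the buffer handed in must already provide extra_prefix_len bytes of headroom and extra_postfix_len bytes of tailroom for the IV/ICV. A hedged sketch of a caller honouring that contract; the ops-struct layout and callback signature are assumptions for illustration, only the two _len fields come from the comment itself:

    #include <linux/skbuff.h>
    #include <linux/errno.h>

    /* Illustrative stand-in for the real ieee80211 crypto ops table. */
    struct demo_crypto_ops {
            int (*encrypt)(struct sk_buff *skb, int hdr_len, void *priv);
            int extra_prefix_len;   /* headroom the cipher may consume */
            int extra_postfix_len;  /* tailroom the cipher may consume */
    };

    static int demo_encrypt_frame(const struct demo_crypto_ops *ops,
                                  struct sk_buff *skb, int hdr_len, void *priv)
    {
            /* The buffer handed to ->encrypt must already carry the extra space. */
            if (skb_headroom(skb) < ops->extra_prefix_len ||
                skb_tailroom(skb) < ops->extra_postfix_len)
                    return -ENOMEM;

            /* Per the header comment: < 0 is an error, >= 0 is success. */
            return ops->encrypt(skb, hdr_len, priv);
    }
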
/Linux-v5.10/drivers/crypto/hisilicon/sec2/

sec_crypto.c
    72    if (req->c_req.encrypt)    in sec_alloc_queue_id()
    82    if (req->c_req.encrypt)    in sec_free_queue_id()
    185   if (ctx->alg_type == SEC_AEAD && !req->c_req.encrypt)    in sec_req_cb()
    1009  if (c_req->encrypt)    in sec_skcipher_bd_fill()
    1048  if (req->c_req.encrypt)    in sec_update_iv()
    1096  if (!err && ctx->c_ctx.c_mode == SEC_CMODE_CBC && req->c_req.encrypt)    in sec_skcipher_callback()
    1167  sec_auth_bd_fill_ex(auth_ctx, req->c_req.encrypt, req, sec_sqe);    in sec_aead_bd_fill()
    1184  if (!err && c->c_ctx.c_mode == SEC_CMODE_CBC && c_req->encrypt)    in sec_aead_callback()
    1188  if (!err && c_req->encrypt) {    in sec_aead_callback()
    1256  if (ctx->c_ctx.c_mode == SEC_CMODE_CBC && !req->c_req.encrypt)    in sec_process()
    [all …]

/Linux-v5.10/Documentation/filesystems/

fscrypt.rst
    26    at the block device level. This allows it to encrypt different files
    30    However, except for filenames, fscrypt does not encrypt filesystem
    290   key and a single filenames encryption key. To still encrypt different
    540   encrypted, even if it is empty. Users who want to encrypt an entire
    566   kernel config, and the superblock must have had the "encrypt"
    567   feature flag enabled using ``tune2fs -O encrypt`` or ``mkfs.ext4 -O
    568   encrypt``.)
    1198  cannot encrypt data in-place in the page cache, since the cached
    1199  plaintext must be preserved. Instead, filesystems must encrypt into a
    1262  filesystem test suite. First, run all the tests in the "encrypt"
    [all …]

/Linux-v5.10/drivers/net/wireless/intel/ipw2x00/

libipw_tx.c
    148   /* To encrypt, frame format is:    in libipw_encrypt_fragment()
    249   int encrypt, host_encrypt, host_encrypt_msdu;    in libipw_xmit() local
    285   encrypt = !(ether_type == htons(ETH_P_PAE) && ieee->ieee802_1x) &&    in libipw_xmit()
    286   ieee->sec.encrypt;    in libipw_xmit()
    288   host_encrypt = ieee->host_encrypt && encrypt && crypt;    in libipw_xmit()
    289   host_encrypt_msdu = ieee->host_encrypt_msdu && encrypt && crypt;    in libipw_xmit()
    291   if (!encrypt && ieee->ieee802_1x &&    in libipw_xmit()
    336   /* Encrypt msdu first on the whole data packet. */    in libipw_xmit()
    419   txb->encrypted = encrypt;    in libipw_xmit()