/Linux-v5.15/crypto/ |
D | tcrypt.c |
    46    #define ENCRYPT 1    macro
    164   if (enc == ENCRYPT)    in do_mult_aead_op()
    276   if (enc == ENCRYPT)    in test_mb_aead_speed()
    557   if (enc == ENCRYPT)    in test_aead_speed()
    1175  if (enc == ENCRYPT)    in do_mult_acipher_op()
    1275  if (enc == ENCRYPT)    in test_mb_skcipher_speed()
    1518  if (enc == ENCRYPT)    in test_skcipher_speed()
    2064  test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,    in do_test()
    2068  test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,    in do_test()
    2072  test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,    in do_test()
    [all …]
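tcrypt drives every speed test off this single direction flag. A minimal sketch of the pattern, assuming DECRYPT is the 0 counterpart of ENCRYPT and using an illustrative helper name (run_one_aead_op is not a tcrypt.c function):

    #include <crypto/aead.h>

    #define ENCRYPT 1          /* as in tcrypt.c line 46 */
    #define DECRYPT 0          /* assumed counterpart */

    /* Illustrative helper: pick the AEAD direction from the tcrypt-style flag. */
    static inline int run_one_aead_op(struct aead_request *req, int enc)
    {
            if (enc == ENCRYPT)
                    return crypto_aead_encrypt(req);
            return crypto_aead_decrypt(req);
    }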
|
/Linux-v5.15/drivers/crypto/ |
D | padlock-aes.c |
    66   struct cword encrypt;    member
    134  ctx->cword.encrypt.rounds = 10 + (key_len - 16) / 4;    in aes_set_key()
    135  ctx->cword.decrypt.rounds = ctx->cword.encrypt.rounds;    in aes_set_key()
    136  ctx->cword.encrypt.ksize = (key_len - 16) / 8;    in aes_set_key()
    137  ctx->cword.decrypt.ksize = ctx->cword.encrypt.ksize;    in aes_set_key()
    144  ctx->cword.encrypt.keygen = 1;    in aes_set_key()
    155  if (&ctx->cword.encrypt == per_cpu(paes_last_cword, cpu) ||    in aes_set_key()
    310  padlock_reset_key(&ctx->cword.encrypt);    in padlock_aes_encrypt()
    311  ecb_crypt(in, out, ctx->E, &ctx->cword.encrypt, 1);    in padlock_aes_encrypt()
    312  padlock_store_cword(&ctx->cword.encrypt);    in padlock_aes_encrypt()
    [all …]
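The aes_set_key() hits derive the PadLock control word from the key length; working the two formulas through the three AES key sizes confirms they reproduce the standard parameters (padlock_rounds() below is an illustrative helper, not driver code):

    /* rounds = 10 + (key_len - 16) / 4 and ksize = (key_len - 16) / 8 give:
     *
     *   key_len = 16 (AES-128): rounds = 10, ksize = 0
     *   key_len = 24 (AES-192): rounds = 12, ksize = 1
     *   key_len = 32 (AES-256): rounds = 14, ksize = 2
     *
     * The decrypt control word then simply mirrors these values (lines 135, 137).
     */
    static inline unsigned int padlock_rounds(unsigned int key_len)
    {
            return 10 + (key_len - 16) / 4;
    }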
|
D | ixp4xx_crypto.c |
    154  bool encrypt;    member
    164  int encrypt;    member
    180  struct ix_sa_dir encrypt;    member
    358  if (req_ctx->encrypt) {    in finish_scattered_hmac()
    398  if (req_ctx->encrypt) {    in one_packet()
    619  ret = init_sa_dir(&ctx->encrypt);    in init_tfm()
    624  free_sa_dir(&ctx->encrypt);    in init_tfm()
    661  free_sa_dir(&ctx->encrypt);    in exit_tfm()
    735  static int setup_auth(struct crypto_tfm *tfm, int encrypt, unsigned int authsize,    in setup_auth() argument
    746  dir = encrypt ? &ctx->encrypt : &ctx->decrypt;    in setup_auth()
    [all …]
|
/Linux-v5.15/drivers/crypto/caam/ |
D | caamalg.c |
    336   * AES GCM encrypt shared descriptor    in gcm_set_sh_desc()
    401   * RFC4106 encrypt shared descriptor    in rfc4106_set_sh_desc()
    469   * RFC4543 encrypt shared descriptor    in rfc4543_set_sh_desc()
    1057  bool all_contig, bool encrypt)    in init_aead_job() argument
    1069  sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;    in init_aead_job()
    1070  ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;    in init_aead_job()
    1106  if (encrypt)    in init_aead_job()
    1118  bool all_contig, bool encrypt)    in init_gcm_job() argument
    1127  init_aead_job(req, edesc, all_contig, encrypt);    in init_gcm_job()
    1132  if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))    in init_gcm_job()
    [all …]
|
D | caamalg_qi.c |
    258  if (ctx->drv_ctx[ENCRYPT]) {    in aead_setkey()
    259  ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],    in aead_setkey()
    382  if (ctx->drv_ctx[ENCRYPT]) {    in gcm_setkey()
    383  ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],    in gcm_setkey()
    490  if (ctx->drv_ctx[ENCRYPT]) {    in rfc4106_setkey()
    491  ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],    in rfc4106_setkey()
    596  if (ctx->drv_ctx[ENCRYPT]) {    in rfc4543_setkey()
    597  ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],    in rfc4543_setkey()
    636  /* skcipher encrypt, decrypt shared descriptors */    in skcipher_setkey()
    643  if (ctx->drv_ctx[ENCRYPT]) {    in skcipher_setkey()
    [all …]
|
D | caamalg_qi2.c |
    235  flc = &ctx->flc[ENCRYPT];    in aead_set_sh_desc()
    249  dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],    in aead_set_sh_desc()
    350  bool encrypt)    in aead_edesc_alloc() argument
    381  dst_len = src_len + (encrypt ? authsize : (-authsize));    in aead_edesc_alloc()
    426  (encrypt ? authsize : 0);    in aead_edesc_alloc()
    445  if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv)    in aead_edesc_alloc()
    506   * to skip it when we authenticate or encrypt...    in aead_edesc_alloc()
    547  (encrypt ? ctx->authsize : (-ctx->authsize));    in aead_edesc_alloc()
    599  flc = &ctx->flc[ENCRYPT];    in chachapoly_set_sh_desc()
    604  dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],    in chachapoly_set_sh_desc()
    [all …]
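The dst_len computation at line 381 is the usual AEAD length rule: encryption appends the authentication tag, decryption strips it. A minimal sketch, with aead_dst_len() as a hypothetical helper rather than a caamalg_qi2.c function:

    #include <linux/types.h>

    static inline int aead_dst_len(int src_len, int authsize, bool encrypt)
    {
            /* ciphertext = plaintext + tag; plaintext = ciphertext - tag */
            return src_len + (encrypt ? authsize : -authsize);
    }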
|
/Linux-v5.15/arch/sparc/crypto/ |
D | des_glue.c |
    95   static int __ecb_crypt(struct skcipher_request *req, bool encrypt)    in __ecb_crypt() argument
    107  if (encrypt)    in __ecb_crypt()
    136  static int __cbc_crypt(struct skcipher_request *req, bool encrypt)    in __cbc_crypt() argument
    148  if (encrypt)    in __cbc_crypt()
    153  if (encrypt)    in __cbc_crypt()
    244  static int __ecb3_crypt(struct skcipher_request *req, bool encrypt)    in __ecb3_crypt() argument
    257  if (encrypt)    in __ecb3_crypt()
    290  static int __cbc3_crypt(struct skcipher_request *req, bool encrypt)    in __cbc3_crypt() argument
    303  if (encrypt)    in __cbc3_crypt()
    309  if (encrypt)    in __cbc3_crypt()
    [all …]
|
D | aes_glue.c |
    36   void (*encrypt)(const u64 *key, const u32 *input, u32 *output);    member
    130  .encrypt = aes_sparc64_encrypt_128,
    142  .encrypt = aes_sparc64_encrypt_192,
    154  .encrypt = aes_sparc64_encrypt_256,
    209  ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);    in crypto_aes_encrypt()
    394  .encrypt = ecb_encrypt,
    408  .encrypt = cbc_encrypt,
    422  .encrypt = ctr_crypt,
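The sparc64 glue dispatches single-block operations through a per-key-size ops table (the function-pointer member at line 36). A compressed sketch of that pattern, with placeholder implementations standing in for the aes_sparc64_encrypt_* assembly entry points:

    #include <linux/types.h>

    struct aes_ops {
            void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
    };

    /* Placeholders for the key-size-specific assembly routines. */
    static void my_encrypt_128(const u64 *key, const u32 *in, u32 *out) { }
    static void my_encrypt_192(const u64 *key, const u32 *in, u32 *out) { }
    static void my_encrypt_256(const u64 *key, const u32 *in, u32 *out) { }

    static const struct aes_ops aes128_ops = { .encrypt = my_encrypt_128 };
    static const struct aes_ops aes192_ops = { .encrypt = my_encrypt_192 };
    static const struct aes_ops aes256_ops = { .encrypt = my_encrypt_256 };

    /* setkey() stores one of the three tables in the tfm context, so the
     * single-block path is simply ctx->ops->encrypt(ctx->key, src, dst). */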
|
/Linux-v5.15/net/ceph/ |
D | crypto.c |
    151   * Currently these are encrypt out-buffer (ceph_buffer) and decrypt
    219  static int ceph_aes_crypt(const struct ceph_crypto_key *key, bool encrypt,    in ceph_aes_crypt() argument
    227  int crypt_len = encrypt ? in_len + pad_byte : in_len;    in ceph_aes_crypt()
    231  if (encrypt)    in ceph_aes_crypt()
    248  if (encrypt)    in ceph_aes_crypt()
    255  encrypt ? "en" : "de", ret);    in ceph_aes_crypt()
    263  if (encrypt) {    in ceph_aes_crypt()
    283  int ceph_crypt(const struct ceph_crypto_key *key, bool encrypt,    in ceph_crypt() argument
    291  return ceph_aes_crypt(key, encrypt, buf, buf_len, in_len,    in ceph_crypt()
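The crypt_len expression at line 227 reflects CBC padding: the buffer grows to a whole AES block on encryption and is taken as-is on decryption. A small sketch under the conventional padding rule (the exact pad_byte formula is assumed here, not quoted from crypto.c):

    #include <linux/types.h>

    static inline int cbc_crypt_len(int in_len, bool encrypt)
    {
            int pad_byte = 16 - (in_len & 15);   /* 1..16 pad bytes on encrypt */

            /* e.g. in_len = 29 -> pad_byte = 3 -> crypt_len = 32 when encrypting */
            return encrypt ? in_len + pad_byte : in_len;
    }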
|
/Linux-v5.15/drivers/crypto/virtio/ |
D | virtio_crypto_algs.c |
    38   bool encrypt;    member
    66   /* Finish the encrypt or decrypt process */    in virtio_crypto_dataq_sym_callback()
    118  int encrypt)    in virtio_crypto_alg_skcipher_init_session() argument
    123  int op = encrypt ? VIRTIO_CRYPTO_OP_ENCRYPT : VIRTIO_CRYPTO_OP_DECRYPT;    in virtio_crypto_alg_skcipher_init_session()
    191  if (encrypt)    in virtio_crypto_alg_skcipher_init_session()
    206  int encrypt)    in virtio_crypto_alg_skcipher_close_session() argument
    225  if (encrypt)    in virtio_crypto_alg_skcipher_close_session()
    387  if (vc_sym_req->encrypt) {    in __virtio_crypto_skcipher_do_req()
    441  if (!vc_sym_req->encrypt)    in __virtio_crypto_skcipher_do_req()
    502  vc_sym_req->encrypt = true;    in virtio_crypto_skcipher_encrypt()
    [all …]
|
/Linux-v5.15/fs/crypto/ |
D | crypto.c |
    92   /* Encrypt or decrypt a single filesystem block of file contents */
    140   * fscrypt_encrypt_pagecache_blocks() - Encrypt filesystem blocks from a
    142   * @page: The locked pagecache page containing the block(s) to encrypt
    143   * @len: Total size of the block(s) to encrypt. Must be a nonzero
    145   * @offs: Byte offset within @page of the first block to encrypt. Must be
    205   * fscrypt_encrypt_block_inplace() - Encrypt a filesystem block in-place
    207   * @page: The page containing the block to encrypt
    208   * @len: Size of block to encrypt. Doesn't need to be a multiple of the
    210   * @offs: Byte offset within @page at which the block to encrypt begins
    215   * Encrypt a possibly-compressed filesystem block that is located in an
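A minimal sketch of how a filesystem write path would call fscrypt_encrypt_pagecache_blocks(); the signature is the one documented here (v5.15 include/linux/fscrypt.h), while the helper name and the surrounding writeback handling are illustrative:

    #include <linux/fscrypt.h>
    #include <linux/err.h>
    #include <linux/gfp.h>

    static int write_encrypted_block(struct page *page, unsigned int len,
                                     unsigned int offs)
    {
            struct page *ciphertext_page;

            /* Returns a bounce page holding the ciphertext; the pagecache page
             * keeps the plaintext. */
            ciphertext_page = fscrypt_encrypt_pagecache_blocks(page, len, offs,
                                                               GFP_NOFS);
            if (IS_ERR(ciphertext_page))
                    return PTR_ERR(ciphertext_page);

            /* ... submit ciphertext_page for I/O, then release it with
             * fscrypt_free_bounce_page() on bio completion ... */
            return 0;
    }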
|
/Linux-v5.15/drivers/crypto/cavium/nitrox/ |
D | nitrox_skcipher.c |
    92   if (nkreq->creq.ctrl.s.arg == ENCRYPT) {    in nitrox_cbc_cipher_callback()
    264  creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);    in nitrox_skcipher_crypt()
    401  .encrypt = nitrox_aes_encrypt,
    420  .encrypt = nitrox_aes_encrypt,
    439  .encrypt = nitrox_aes_encrypt,
    458  .encrypt = nitrox_aes_encrypt,
    479  .encrypt = nitrox_aes_encrypt,
    496  .encrypt = nitrox_aes_encrypt,
    515  .encrypt = nitrox_3des_encrypt,
    534  .encrypt = nitrox_3des_encrypt,
|
/Linux-v5.15/drivers/crypto/ccree/ |
D | cc_cipher.c |
    898   "Encrypt" : "Decrypt"), req, iv, nbytes);    in cc_cipher_process()
    1016  .encrypt = cc_cipher_encrypt,
    1034  .encrypt = cc_cipher_encrypt,
    1052  .encrypt = cc_cipher_encrypt,
    1070  .encrypt = cc_cipher_encrypt,
    1088  .encrypt = cc_cipher_encrypt,
    1106  .encrypt = cc_cipher_encrypt,
    1124  .encrypt = cc_cipher_encrypt,
    1146  .encrypt = cc_cipher_encrypt,
    1163  .encrypt = cc_cipher_encrypt,
    [all …]
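Each of those hits wires the driver's entry point into a generic skcipher_alg template. A bare-bones sketch of that registration pattern, using placeholder my_* names rather than ccree's:

    #include <crypto/internal/skcipher.h>
    #include <crypto/aes.h>
    #include <linux/module.h>

    static int my_setkey(struct crypto_skcipher *tfm, const u8 *key,
                         unsigned int keylen)
    {
            return 0;        /* program or expand the key here */
    }

    static int my_encrypt(struct skcipher_request *req)
    {
            return 0;        /* perform or queue the encryption described by req */
    }

    static int my_decrypt(struct skcipher_request *req)
    {
            return 0;
    }

    static struct skcipher_alg my_cbc_aes = {
            .base.cra_name          = "cbc(aes)",
            .base.cra_driver_name   = "cbc-aes-mydriver",
            .base.cra_priority      = 300,
            .base.cra_blocksize     = AES_BLOCK_SIZE,
            .base.cra_module        = THIS_MODULE,
            .min_keysize            = AES_MIN_KEY_SIZE,
            .max_keysize            = AES_MAX_KEY_SIZE,
            .ivsize                 = AES_BLOCK_SIZE,
            .setkey                 = my_setkey,
            .encrypt                = my_encrypt,
            .decrypt                = my_decrypt,
    };

    /* registered at module init with crypto_register_skcipher(&my_cbc_aes) */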
|
/Linux-v5.15/drivers/crypto/inside-secure/ |
D | safexcel_cipher.c |
    656   /* For encrypt take the last output word */    in safexcel_handle_req_result()
    695    * AEAD has auth tag appended to output for encrypt and    in safexcel_send_req()
    1277  .encrypt = safexcel_encrypt,
    1314  .encrypt = safexcel_encrypt,
    1352  .encrypt = safexcel_encrypt,
    1390  .encrypt = safexcel_encrypt,
    1464  .encrypt = safexcel_encrypt,
    1526  .encrypt = safexcel_encrypt,
    1565  .encrypt = safexcel_encrypt,
    1625  .encrypt = safexcel_encrypt,
    [all …]
|
/Linux-v5.15/include/crypto/ |
D | skcipher.h |
    17    * @cryptlen: Number of bytes to encrypt or decrypt
    69    * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
    80    * @decrypt: Decrypt a single block. This is a reverse counterpart to @encrypt
    94    * IV of exactly that size to perform the encrypt or decrypt operation.
    107  int (*encrypt)(struct skcipher_request *req);    member
    409   * crypto_skcipher_encrypt() - encrypt plaintext
    413   * Encrypt plaintext data using the skcipher_request handle. That data
    488   * encrypt and decrypt API calls. During the allocation, the provided skcipher
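From the caller's side, crypto_skcipher_encrypt() operates on a request that carries the scatterlists, length and IV. A minimal synchronous-usage sketch (the algorithm name, buffer handling and collapsed error paths are illustrative; the API calls themselves are the standard ones from this header):

    #include <crypto/skcipher.h>
    #include <linux/scatterlist.h>
    #include <linux/err.h>
    #include <linux/slab.h>

    static int encrypt_one_buffer(u8 *buf, unsigned int len,
                                  const u8 *key, unsigned int keylen, u8 *iv)
    {
            struct crypto_skcipher *tfm;
            struct skcipher_request *req = NULL;
            struct scatterlist sg;
            DECLARE_CRYPTO_WAIT(wait);
            int err;

            tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);

            err = crypto_skcipher_setkey(tfm, key, keylen);
            if (err)
                    goto out;

            req = skcipher_request_alloc(tfm, GFP_KERNEL);
            if (!req) {
                    err = -ENOMEM;
                    goto out;
            }

            sg_init_one(&sg, buf, len);
            skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                          crypto_req_done, &wait);
            skcipher_request_set_crypt(req, &sg, &sg, len, iv);

            /* May complete asynchronously; crypto_wait_req() blocks until done. */
            err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

    out:
            skcipher_request_free(req);
            crypto_free_skcipher(tfm);
            return err;
    }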
|
/Linux-v5.15/arch/x86/crypto/ |
D | sm4_aesni_avx2_glue.c |
    71   .encrypt = sm4_avx_ecb_encrypt,
    88   .encrypt = sm4_cbc_encrypt,
    106  .encrypt = sm4_cfb_encrypt,
    124  .encrypt = ctr_crypt,
|
/Linux-v5.15/drivers/crypto/keembay/ |
D | keembay-ocs-aes-core.c |
    64    * @instruction: Instruction to be executed (encrypt / decrypt).
    82    * used for GCM encrypt / decrypt).
    306   * Called by encrypt() / decrypt() skcipher functions.
    561  /* For CTS Encrypt, swap last 2 blocks, if needed. */    in kmb_ocs_sk_run()
    571  /* CBC encrypt case. */    in kmb_ocs_sk_run()
    633   * Called by encrypt() / decrypt() aead functions.
    766   * For encrypt:    in kmb_ocs_aead_dma_prepare()
    818  /* If this is not CCM encrypt, we are done. */    in kmb_ocs_aead_dma_prepare()
    832   * For CCM encrypt the input and output linked lists contain    in kmb_ocs_aead_dma_prepare()
    930  /* For GCM encrypt, we must manually copy out_tag to DST sg. */    in kmb_ocs_aead_run()
    [all …]
|
/Linux-v5.15/lib/crypto/ |
D | chacha20poly1305.c |
    213  int encrypt)    in chacha20poly1305_crypt_sg_inplace() argument
    262  if (!encrypt)    in chacha20poly1305_crypt_sg_inplace()
    292  if (encrypt)    in chacha20poly1305_crypt_sg_inplace()
    305  if (encrypt) {    in chacha20poly1305_crypt_sg_inplace()
    321  scatterwalk_map_and_copy(b.mac[encrypt], src, src_len,    in chacha20poly1305_crypt_sg_inplace()
    322  sizeof(b.mac[1]), encrypt);    in chacha20poly1305_crypt_sg_inplace()
    323  ret = encrypt ||    in chacha20poly1305_crypt_sg_inplace()
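chacha20poly1305_crypt_sg_inplace() is the scatterlist backend; most callers use the flat helpers declared in include/crypto/chacha20poly1305.h. A small usage sketch (seal_message() and its buffers are illustrative):

    #include <crypto/chacha20poly1305.h>

    static void seal_message(u8 *out, const u8 *msg, size_t msg_len,
                             const u8 *ad, size_t ad_len, u64 nonce,
                             const u8 key[CHACHA20POLY1305_KEY_SIZE])
    {
            /* out must hold msg_len + CHACHA20POLY1305_AUTHTAG_SIZE bytes;
             * the tag is appended after the ciphertext. */
            chacha20poly1305_encrypt(out, msg, msg_len, ad, ad_len, nonce, key);
    }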
|
/Linux-v5.15/drivers/crypto/hisilicon/sec2/ |
D | sec_crypto.c |
    107   if (req->c_req.encrypt)    in sec_alloc_queue_id()
    117   if (req->c_req.encrypt)    in sec_free_queue_id()
    867   if (!c_req->encrypt && ctx->alg_type == SEC_AEAD) {    in GEN_SEC_SETKEY_FUNC()
    962   if (!c_req->encrypt && ctx->alg_type == SEC_AEAD) {    in sec_cipher_map()
    1249  if (c_req->encrypt)    in sec_skcipher_bd_fill()
    1297  if (c_req->encrypt)    in sec_skcipher_bd_fill_v3()
    1345  if (req->c_req.encrypt)    in sec_update_iv()
    1401  ctx->c_ctx.c_mode == SEC_CMODE_CTR) && req->c_req.encrypt)    in sec_skcipher_callback()
    1451  if (!c_req->encrypt)    in set_aead_auth_iv()
    1586  sec_auth_bd_fill_xcm(auth_ctx, req->c_req.encrypt, req, sec_sqe);    in sec_aead_bd_fill()
    [all …]
|
/Linux-v5.15/drivers/crypto/ccp/ |
D | ccp-crypto-aes.c |
    65   static int ccp_aes_crypt(struct skcipher_request *req, bool encrypt)    in ccp_aes_crypt() argument
    98   (encrypt) ? CCP_AES_ACTION_ENCRYPT : CCP_AES_ACTION_DECRYPT;    in ccp_aes_crypt()
    160  static int ccp_aes_rfc3686_crypt(struct skcipher_request *req, bool encrypt)    in ccp_aes_rfc3686_crypt() argument
    181  return ccp_aes_crypt(req, encrypt);    in ccp_aes_rfc3686_crypt()
    208  .encrypt = ccp_aes_encrypt,
    226  .encrypt = ccp_aes_rfc3686_encrypt,
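ccp_aes_crypt() follows the common "one worker, two thin wrappers" arrangement: a single routine takes a bool direction and the .encrypt/.decrypt template entries just pass true or false. A sketch with placeholder my_* names:

    #include <crypto/skcipher.h>

    static int my_aes_crypt(struct skcipher_request *req, bool encrypt)
    {
            /* build the command, selecting the direction from 'encrypt' */
            return 0;
    }

    static int my_aes_encrypt(struct skcipher_request *req)
    {
            return my_aes_crypt(req, true);
    }

    static int my_aes_decrypt(struct skcipher_request *req)
    {
            return my_aes_crypt(req, false);
    }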
|
/Linux-v5.15/arch/arm/crypto/ |
D | chacha-glue.c |
    209  .encrypt = chacha_arm,
    224  .encrypt = xchacha_arm,
    239  .encrypt = xchacha_arm,
    259  .encrypt = chacha_neon,
    275  .encrypt = xchacha_neon,
    291  .encrypt = xchacha_neon,
|
D | aes-neonbs-glue.c |
    344  static int __xts_crypt(struct skcipher_request *req, bool encrypt,    in __xts_crypt() argument
    377  int reorder_last_tweak = !encrypt && tail > 0;    in __xts_crypt()
    404  if (encrypt)    in __xts_crypt()
    439  .encrypt = ecb_encrypt,
    456  .encrypt = cbc_encrypt,
    475  .encrypt = ctr_encrypt,
    491  .encrypt = ctr_encrypt_sync,
    507  .encrypt = xts_encrypt,
|
/Linux-v5.15/drivers/staging/rtl8192u/ieee80211/ |
D | ieee80211_crypt.h |
    36  /* encrypt/decrypt return < 0 on error or >= 0 on success. The return
    60  /* maximum number of bytes added by encryption; encrypt buf is
    62   * extra_postfix_len; encrypt need not use all this space, but
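The comment at lines 60-62 describes how callers must size the frame buffer around the cipher's prefix and postfix overhead. A hedged sketch of that sizing, with a stand-in ops struct (the field names follow the comment, the helper itself is illustrative):

    #include <linux/skbuff.h>

    struct my_crypto_ops {
            int extra_prefix_len;    /* e.g. IV / extended IV in front */
            int extra_postfix_len;   /* e.g. MIC / ICV behind the payload */
    };

    static struct sk_buff *alloc_crypt_frame(const struct my_crypto_ops *ops,
                                             unsigned int payload_len)
    {
            struct sk_buff *skb = dev_alloc_skb(ops->extra_prefix_len +
                                                payload_len +
                                                ops->extra_postfix_len);

            if (skb)
                    skb_reserve(skb, ops->extra_prefix_len);
            return skb;
    }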
|
/Linux-v5.15/Documentation/filesystems/ |
D | fscrypt.rst |
    26    at the block device level. This allows it to encrypt different files
    30    However, except for filenames, fscrypt does not encrypt filesystem
    290   key and a single filenames encryption key. To still encrypt different
    540   encrypted, even if it is empty. Users who want to encrypt an entire
    566   kernel config, and the superblock must have had the "encrypt"
    567   feature flag enabled using ``tune2fs -O encrypt`` or ``mkfs.ext4 -O
    568   encrypt``.)
    1193  cannot encrypt data in-place in the page cache, since the cached
    1194  plaintext must be preserved. Instead, filesystems must encrypt into a
    1257  filesystem test suite. First, run all the tests in the "encrypt"
    [all …]
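Once the "encrypt" feature flag is enabled, a policy is applied to an empty directory through the fscrypt ioctls. A userspace sketch under those assumptions (key_id[] must come from a prior FS_IOC_ADD_ENCRYPTION_KEY call; the definitions are from <linux/fscrypt.h>, the helper name is illustrative):

    #include <fcntl.h>
    #include <string.h>
    #include <unistd.h>
    #include <sys/ioctl.h>
    #include <linux/fscrypt.h>

    static int set_policy(const char *dir,
                          const __u8 key_id[FSCRYPT_KEY_IDENTIFIER_SIZE])
    {
            struct fscrypt_policy_v2 policy = {
                    .version = FSCRYPT_POLICY_V2,
                    .contents_encryption_mode = FSCRYPT_MODE_AES_256_XTS,
                    .filenames_encryption_mode = FSCRYPT_MODE_AES_256_CTS,
                    .flags = FSCRYPT_POLICY_FLAGS_PAD_32,
            };
            int fd, ret;

            memcpy(policy.master_key_identifier, key_id,
                   FSCRYPT_KEY_IDENTIFIER_SIZE);

            fd = open(dir, O_RDONLY | O_CLOEXEC);  /* must be an empty directory */
            if (fd < 0)
                    return -1;
            ret = ioctl(fd, FS_IOC_SET_ENCRYPTION_POLICY, &policy);
            close(fd);
            return ret;
    }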
|
/Linux-v5.15/drivers/net/wireless/intel/ipw2x00/ |
D | libipw_tx.c |
    148  /* To encrypt, frame format is:    in libipw_encrypt_fragment()
    249  int encrypt, host_encrypt, host_encrypt_msdu;    in libipw_xmit() local
    285  encrypt = !(ether_type == htons(ETH_P_PAE) && ieee->ieee802_1x) &&    in libipw_xmit()
    286  ieee->sec.encrypt;    in libipw_xmit()
    288  host_encrypt = ieee->host_encrypt && encrypt && crypt;    in libipw_xmit()
    289  host_encrypt_msdu = ieee->host_encrypt_msdu && encrypt && crypt;    in libipw_xmit()
    291  if (!encrypt && ieee->ieee802_1x &&    in libipw_xmit()
    336  /* Encrypt msdu first on the whole data packet. */    in libipw_xmit()
    419  txb->encrypted = encrypt;    in libipw_xmit()
|