/Linux-v5.4/crypto/ |
D | blkcipher.c |
    35  struct blkcipher_walk *walk);
    37  struct blkcipher_walk *walk);
    39  static inline void blkcipher_map_src(struct blkcipher_walk *walk)
    41  walk->src.virt.addr = scatterwalk_map(&walk->in);
    44  static inline void blkcipher_map_dst(struct blkcipher_walk *walk)
    46  walk->dst.virt.addr = scatterwalk_map(&walk->out);
    49  static inline void blkcipher_unmap_src(struct blkcipher_walk *walk)
    51  scatterwalk_unmap(walk->src.virt.addr);
    54  static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk)
    56  scatterwalk_unmap(walk->dst.virt.addr);
        [all …]
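The blkcipher map/unmap helpers above are thin wrappers around the scatterwalk API (see the include/crypto/scatterwalk.h hits further down). As a rough sketch of how that API is driven, modeled on the ccm_calculate_auth_mac()/gcm_calculate_auth_mac() loops below; walk_and_consume() and consume() are hypothetical names, not kernel functions:

    #include <crypto/scatterwalk.h>
    #include <linux/scatterlist.h>

    /* Hypothetical helper: feed every byte of an sg list to consume(). */
    static void walk_and_consume(struct scatterlist *src, unsigned int len,
                                 void (*consume)(const u8 *p, unsigned int n))
    {
            struct scatter_walk walk;

            scatterwalk_start(&walk, src);
            while (len) {
                    u8 *p;
                    unsigned int n = scatterwalk_clamp(&walk, len);

                    if (!n) {
                            /* this sg entry is exhausted, restart on the next one */
                            scatterwalk_start(&walk, sg_next(walk.sg));
                            n = scatterwalk_clamp(&walk, len);
                    }
                    p = scatterwalk_map(&walk);      /* kmap the current page */
                    consume(p, n);
                    scatterwalk_unmap(p);
                    scatterwalk_advance(&walk, n);
                    len -= n;
                    scatterwalk_done(&walk, 0, len); /* 0: walking a source sg */
            }
    }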
D | skcipher.c |
    42  static int skcipher_walk_next(struct skcipher_walk *walk);
    44  static inline void skcipher_unmap(struct scatter_walk *walk, void *vaddr)
    46  if (PageHighMem(scatterwalk_page(walk)))
    50  static inline void *skcipher_map(struct scatter_walk *walk)
    52  struct page *page = scatterwalk_page(walk);
    55  offset_in_page(walk->offset);
    58  static inline void skcipher_map_src(struct skcipher_walk *walk)
    60  walk->src.virt.addr = skcipher_map(&walk->in);
    63  static inline void skcipher_map_dst(struct skcipher_walk *walk)
    65  walk->dst.virt.addr = skcipher_map(&walk->out);
        [all …]
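Besides the low-level mapping helpers above, skcipher.c implements the skcipher_walk machinery consumed by the mode templates and arch glue listed below. A minimal sketch of the canonical consumer loop, assuming a hypothetical block routine my_block_fn() and block size MY_BLOCK_SIZE:

    #include <crypto/internal/skcipher.h>

    static int ecb_like_crypt(struct skcipher_request *req)
    {
            struct skcipher_walk walk;
            int err;

            /* maps the request's src/dst scatterlists chunk by chunk */
            err = skcipher_walk_virt(&walk, req, false);

            while (walk.nbytes) {
                    /* round the mapped chunk down to whole blocks */
                    unsigned int n = walk.nbytes - walk.nbytes % MY_BLOCK_SIZE;

                    my_block_fn(walk.dst.virt.addr, walk.src.virt.addr, n);

                    /* second argument: bytes we did NOT process */
                    err = skcipher_walk_done(&walk, walk.nbytes - n);
            }
            return err;
    }

The second argument to skcipher_walk_done() is the count of bytes left unprocessed; the arm64 aes-glue.c hits below pass walk.nbytes % AES_BLOCK_SIZE in exactly this way.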
D | ablkcipher.c |
    40  void __ablkcipher_walk_complete(struct ablkcipher_walk *walk)
    44  list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {
    52  static inline void ablkcipher_queue_write(struct ablkcipher_walk *walk,
    55  p->dst = walk->out;
    56  list_add_tail(&p->entry, &walk->buffers);
    69  static inline void ablkcipher_done_slow(struct ablkcipher_walk *walk,
    73  unsigned int len_this_page = scatterwalk_pagelen(&walk->out);
    77  scatterwalk_advance(&walk->out, n);
    81  scatterwalk_start(&walk->out, sg_next(walk->out.sg));
    85  static inline void ablkcipher_done_fast(struct ablkcipher_walk *walk,
        [all …]
D | cfb.c |
    42  static void crypto_cfb_final(struct skcipher_walk *walk,
    48  u8 *src = walk->src.virt.addr;
    49  u8 *dst = walk->dst.virt.addr;
    50  u8 *iv = walk->iv;
    51  unsigned int nbytes = walk->nbytes;
    57  static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk,
    61  unsigned int nbytes = walk->nbytes;
    62  u8 *src = walk->src.virt.addr;
    63  u8 *dst = walk->dst.virt.addr;
    64  u8 *iv = walk->iv;
        [all …]
D | pcbc.c |
    20  struct skcipher_walk *walk,  (in crypto_pcbc_encrypt_segment)
    24  unsigned int nbytes = walk->nbytes;
    25  u8 *src = walk->src.virt.addr;
    26  u8 *dst = walk->dst.virt.addr;
    27  u8 * const iv = walk->iv;
    42  struct skcipher_walk *walk,  (in crypto_pcbc_encrypt_inplace)
    46  unsigned int nbytes = walk->nbytes;
    47  u8 *src = walk->src.virt.addr;
    48  u8 * const iv = walk->iv;
    67  struct skcipher_walk walk;  (local in crypto_pcbc_encrypt)
        [all …]
D | ahash.c |
    40  static int hash_walk_next(struct crypto_hash_walk *walk)
    42  unsigned int alignmask = walk->alignmask;
    43  unsigned int offset = walk->offset;
    44  unsigned int nbytes = min(walk->entrylen,
    47  if (walk->flags & CRYPTO_ALG_ASYNC)
    48  walk->data = kmap(walk->pg);
    50  walk->data = kmap_atomic(walk->pg);
    51  walk->data += offset;
    60  walk->entrylen -= nbytes;
    64  static int hash_walk_new_entry(struct crypto_hash_walk *walk)
        [all …]
D | ofb.c |
    22  struct skcipher_walk walk;  (local in crypto_ofb_crypt)
    25  err = skcipher_walk_virt(&walk, req, false);
    27  while (walk.nbytes >= bsize) {
    28  const u8 *src = walk.src.virt.addr;
    29  u8 *dst = walk.dst.virt.addr;
    30  u8 * const iv = walk.iv;
    31  unsigned int nbytes = walk.nbytes;
    40  err = skcipher_walk_done(&walk, nbytes);
    43  if (walk.nbytes) {
    44  crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);
        [all …]
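OFB turns the block cipher into a stream cipher: each keystream block is the encryption of the previous one, seeded by the IV, so encryption and decryption are the same operation, and the tail handling on lines 43-44 simply uses a partial keystream block. A self-contained userspace sketch; toy_block_encrypt() is a made-up stand-in for crypto_cipher_encrypt_one(), not a real cipher:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define BSIZE 16

    /* Toy stand-in for a real block cipher, for illustration only. */
    static void toy_block_encrypt(uint8_t blk[BSIZE], const uint8_t key[BSIZE])
    {
            for (int i = 0; i < BSIZE; i++)
                    blk[i] = (uint8_t)((blk[i] ^ key[i]) + i + 1);
    }

    /* OFB: keystream block i = E(keystream block i-1), block -1 = iv. */
    static void ofb_crypt(uint8_t *dst, const uint8_t *src, size_t len,
                          uint8_t iv[BSIZE], const uint8_t key[BSIZE])
    {
            while (len) {
                    size_t n = len < BSIZE ? len : BSIZE;

                    toy_block_encrypt(iv, key); /* E(iv) in place, as ofb.c does */
                    for (size_t i = 0; i < n; i++)
                            dst[i] = src[i] ^ iv[i];
                    src += n;
                    dst += n;
                    len -= n;
            }
    }

    int main(void)
    {
            uint8_t key[BSIZE] = "0123456789abcde";
            uint8_t iv[BSIZE] = { 1 }, iv2[BSIZE] = { 1 };
            uint8_t pt[20] = "attack at dawn 1234", ct[20], rt[20];

            ofb_crypt(ct, pt, sizeof(pt), iv, key);
            ofb_crypt(rt, ct, sizeof(rt), iv2, key);  /* decrypt == encrypt */
            printf("round trip %s\n", memcmp(pt, rt, sizeof(pt)) ? "failed" : "ok");
            return 0;
    }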
/Linux-v5.4/include/crypto/ |
D | scatterwalk.h |
    28  static inline unsigned int scatterwalk_pagelen(struct scatter_walk *walk)
    30  unsigned int len = walk->sg->offset + walk->sg->length - walk->offset;
    31  unsigned int len_this_page = offset_in_page(~walk->offset) + 1;
    35  static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk,
    38  unsigned int len_this_page = scatterwalk_pagelen(walk);
    42  static inline void scatterwalk_advance(struct scatter_walk *walk,
    45  walk->offset += nbytes;
    48  static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk,
    51  return !(walk->offset & alignmask);
    54  static inline struct page *scatterwalk_page(struct scatter_walk *walk)
        [all …]
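The expression on line 31 is a bit trick worth spelling out: offset_in_page(~walk->offset) + 1 equals PAGE_SIZE - (walk->offset & (PAGE_SIZE - 1)), i.e. the number of bytes from offset to the end of its page, which scatterwalk_pagelen() then clamps against the bytes left in the sg entry. A quick userspace check, assuming a 4 KiB page for the demo:

    #include <stdio.h>

    #define PAGE_SIZE 4096u
    #define offset_in_page(p) ((unsigned int)(p) & (PAGE_SIZE - 1))

    int main(void)
    {
            unsigned int offsets[] = { 0, 1, 100, 4095, 4096, 8190 };

            for (unsigned int i = 0; i < sizeof(offsets) / sizeof(offsets[0]); i++) {
                    unsigned int off = offsets[i];
                    /* bytes remaining in the page containing 'off' */
                    unsigned int left = offset_in_page(~off) + 1;

                    printf("offset %4u -> %4u bytes left in page\n", off, left);
            }
            return 0;   /* prints 4096, 4095, 3996, 1, 4096, 2 */
    }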
D | cbc.h |
    16  struct skcipher_walk *walk, struct crypto_skcipher *tfm,  (in crypto_cbc_encrypt_segment)
    20  unsigned int nbytes = walk->nbytes;
    21  u8 *src = walk->src.virt.addr;
    22  u8 *dst = walk->dst.virt.addr;
    23  u8 *iv = walk->iv;
    38  struct skcipher_walk *walk, struct crypto_skcipher *tfm,  (in crypto_cbc_encrypt_inplace)
    42  unsigned int nbytes = walk->nbytes;
    43  u8 *src = walk->src.virt.addr;
    44  u8 *iv = walk->iv;
    54  memcpy(walk->iv, iv, bsize);
        [all …]
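crypto_cbc_encrypt_segment() implements the textbook recurrence C_i = E_k(P_i XOR C_(i-1)) with C_0 = IV: crypto_xor(iv, src, bsize) folds the previous ciphertext into the plaintext, and the output of the block encryption becomes the next chaining value. A self-contained sketch of the same chaining; toy_block_encrypt()/toy_block_decrypt() are made-up stand-ins, not real ciphers:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define BSIZE 16

    /* Toy invertible "cipher" pair, for illustration only. */
    static void toy_block_encrypt(uint8_t *dst, const uint8_t *src,
                                  const uint8_t key[BSIZE])
    {
            for (int i = 0; i < BSIZE; i++)
                    dst[i] = (uint8_t)((src[i] ^ key[i]) + 1);
    }

    static void toy_block_decrypt(uint8_t *dst, const uint8_t *src,
                                  const uint8_t key[BSIZE])
    {
            for (int i = 0; i < BSIZE; i++)
                    dst[i] = (uint8_t)(src[i] - 1) ^ key[i];
    }

    /* CBC over whole blocks: C_i = E_k(P_i ^ C_(i-1)), C_0 = iv. */
    static void cbc_encrypt_segment(uint8_t *dst, const uint8_t *src,
                                    size_t nblocks, uint8_t iv[BSIZE],
                                    const uint8_t key[BSIZE])
    {
            while (nblocks--) {
                    for (int i = 0; i < BSIZE; i++)
                            iv[i] ^= src[i];          /* crypto_xor(iv, src, bsize) */
                    toy_block_encrypt(dst, iv, key);  /* fn(tfm, dst, iv) */
                    memcpy(iv, dst, BSIZE);           /* kernel re-points iv = dst */
                    src += BSIZE;
                    dst += BSIZE;
            }
    }

    static void cbc_decrypt_segment(uint8_t *dst, const uint8_t *src,
                                    size_t nblocks, uint8_t iv[BSIZE],
                                    const uint8_t key[BSIZE])
    {
            while (nblocks--) {
                    toy_block_decrypt(dst, src, key);
                    for (int i = 0; i < BSIZE; i++)
                            dst[i] ^= iv[i];          /* P_i = D_k(C_i) ^ C_(i-1) */
                    memcpy(iv, src, BSIZE);
                    src += BSIZE;
                    dst += BSIZE;
            }
    }

    int main(void)
    {
            uint8_t key[BSIZE] = "fedcba987654321";
            uint8_t iv[BSIZE] = { 7 }, iv2[BSIZE] = { 7 };
            uint8_t pt[2 * BSIZE] = "thirty-one chars of plaintext!";
            uint8_t ct[2 * BSIZE], rt[2 * BSIZE];

            cbc_encrypt_segment(ct, pt, 2, iv, key);
            cbc_decrypt_segment(rt, ct, 2, iv2, key);
            printf("round trip %s\n", memcmp(pt, rt, sizeof(pt)) ? "failed" : "ok");
            return 0;
    }

The kernel segment variant avoids the memcpy by re-pointing iv at dst, and the copy back into walk->iv (line 54 of the in-place variant) is what lets the next walk chunk continue the chain.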
/Linux-v5.4/mm/ |
D | pagewalk.c |
     8  struct mm_walk *walk)  (in walk_pte_range)
    12  const struct mm_walk_ops *ops = walk->ops;
    16  err = ops->pte_entry(pte, addr, addr + PAGE_SIZE, walk);
    30  struct mm_walk *walk)  (in walk_pmd_range)
    34  const struct mm_walk_ops *ops = walk->ops;
    41  if (pmd_none(*pmd) || !walk->vma) {
    43  err = ops->pte_hole(addr, next, walk);
    53  err = ops->pmd_entry(pmd, addr, next, walk);
    64  split_huge_pmd(walk->vma, pmd, addr);
    67  err = walk_pte_range(pmd, addr, next, walk);
        [all …]
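pagewalk.c dispatches through the mm_walk_ops callback table seen here (pte_entry, pmd_entry, pte_hole, ...) as it descends the page tables. A minimal sketch of a v5.4-style caller that counts present PTEs in a range; count_pte() and count_present_ptes() are hypothetical, and the caller is assumed to hold mmap_sem as walk_page_range() requires:

    #include <linux/mm.h>
    #include <linux/pagewalk.h>

    /* Called for every PTE in the range; walk->private carries the counter. */
    static int count_pte(pte_t *pte, unsigned long addr, unsigned long next,
                         struct mm_walk *walk)
    {
            unsigned long *count = walk->private;

            if (pte_present(*pte))
                    (*count)++;
            return 0;       /* nonzero would abort the whole walk */
    }

    static const struct mm_walk_ops count_ops = {
            .pte_entry = count_pte,
    };

    static unsigned long count_present_ptes(struct mm_struct *mm,
                                            unsigned long start,
                                            unsigned long end)
    {
            unsigned long count = 0;

            down_read(&mm->mmap_sem);
            walk_page_range(mm, start, end, &count_ops, &count);
            up_read(&mm->mmap_sem);
            return count;
    }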
/Linux-v5.4/arch/x86/crypto/ |
D | glue_helper.c |
    26  struct skcipher_walk walk;  (local in glue_ecb_req_128bit)
    31  err = skcipher_walk_virt(&walk, req, false);
    33  while ((nbytes = walk.nbytes)) {
    34  const u8 *src = walk.src.virt.addr;
    35  u8 *dst = walk.dst.virt.addr;
    40  &walk, fpu_enabled, nbytes);
    58  err = skcipher_walk_done(&walk, nbytes);
    71  struct skcipher_walk walk;  (local in glue_cbc_encrypt_req_128bit)
    75  err = skcipher_walk_virt(&walk, req, false);
    77  while ((nbytes = walk.nbytes)) {
        [all …]
D | cast5_avx_glue.c |
    35  static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk,
    39  walk, fpu_enabled, nbytes);
    52  struct skcipher_walk walk;  (local in ecb_crypt)
    58  err = skcipher_walk_virt(&walk, req, false);
    60  while ((nbytes = walk.nbytes)) {
    61  u8 *wsrc = walk.src.virt.addr;
    62  u8 *wdst = walk.dst.virt.addr;
    64  fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
    93  err = skcipher_walk_done(&walk, nbytes);
   115  struct skcipher_walk walk;  (local in cbc_encrypt)
        [all …]
D | blowfish_glue.c |
    78  struct skcipher_walk walk;  (local in ecb_crypt)
    82  err = skcipher_walk_virt(&walk, req, false);
    84  while ((nbytes = walk.nbytes)) {
    85  u8 *wsrc = walk.src.virt.addr;
    86  u8 *wdst = walk.dst.virt.addr;
   112  err = skcipher_walk_done(&walk, nbytes);
   129  struct skcipher_walk *walk)  (in __cbc_encrypt)
   132  unsigned int nbytes = walk->nbytes;
   133  u64 *src = (u64 *)walk->src.virt.addr;
   134  u64 *dst = (u64 *)walk->dst.virt.addr;
        [all …]
D | des3_ede_glue.c |
    79  struct skcipher_walk walk;  (local in ecb_crypt)
    83  err = skcipher_walk_virt(&walk, req, false);
    85  while ((nbytes = walk.nbytes)) {
    86  u8 *wsrc = walk.src.virt.addr;
    87  u8 *wdst = walk.dst.virt.addr;
   114  err = skcipher_walk_done(&walk, nbytes);
   137  struct skcipher_walk *walk)  (in __cbc_encrypt)
   140  unsigned int nbytes = walk->nbytes;
   141  u64 *src = (u64 *)walk->src.virt.addr;
   142  u64 *dst = (u64 *)walk->dst.virt.addr;
        [all …]
/Linux-v5.4/arch/arm/crypto/ |
D | aes-ce-glue.c |
   181  struct skcipher_walk walk;  (local in ecb_encrypt)
   185  err = skcipher_walk_virt(&walk, req, false);
   187  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
   189  ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
   192  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
   201  struct skcipher_walk walk;  (local in ecb_decrypt)
   205  err = skcipher_walk_virt(&walk, req, false);
   207  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
   209  ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
   212  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        [all …]
D | aes-neonbs-glue.c |
    92  struct skcipher_walk walk;  (local in __ecb_crypt)
    95  err = skcipher_walk_virt(&walk, req, false);
    97  while (walk.nbytes >= AES_BLOCK_SIZE) {
    98  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
   100  if (walk.nbytes < walk.total)
   102  walk.stride / AES_BLOCK_SIZE);
   105  fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
   108  err = skcipher_walk_done(&walk,
   109  walk.nbytes - blocks * AES_BLOCK_SIZE);
   161  struct skcipher_walk walk;  (local in cbc_decrypt)
        [all …]
/Linux-v5.4/arch/arm64/crypto/ |
D | aes-neonbs-glue.c |
   103  struct skcipher_walk walk;  (local in __ecb_crypt)
   106  err = skcipher_walk_virt(&walk, req, false);
   108  while (walk.nbytes >= AES_BLOCK_SIZE) {
   109  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
   111  if (walk.nbytes < walk.total)
   113  walk.stride / AES_BLOCK_SIZE);
   116  fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
   119  err = skcipher_walk_done(&walk,
   120  walk.nbytes - blocks * AES_BLOCK_SIZE);
   162  struct skcipher_walk walk;  (local in cbc_encrypt)
        [all …]
D | aes-glue.c |
   196  struct skcipher_walk walk;  (local in ecb_encrypt)
   199  err = skcipher_walk_virt(&walk, req, false);
   201  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
   203  aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
   206  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
   216  struct skcipher_walk walk;  (local in ecb_decrypt)
   219  err = skcipher_walk_virt(&walk, req, false);
   221  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
   223  aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
   226  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        [all …]
D | aes-ce-ccm-glue.c |
   145  struct scatter_walk walk;  (local in ccm_calculate_auth_mac)
   160  scatterwalk_start(&walk, req->src);
   163  u32 n = scatterwalk_clamp(&walk, len);
   167  scatterwalk_start(&walk, sg_next(walk.sg));
   168  n = scatterwalk_clamp(&walk, len);
   170  p = scatterwalk_map(&walk);
   175  scatterwalk_advance(&walk, n);
   176  scatterwalk_done(&walk, 0, len);
   180  static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
   186  while (walk->nbytes) {
        [all …]
D | ghash-ce-glue.c |
   370  struct scatter_walk walk;  (local in gcm_calculate_auth_mac)
   374  scatterwalk_start(&walk, req->src);
   377  u32 n = scatterwalk_clamp(&walk, len);
   381  scatterwalk_start(&walk, sg_next(walk.sg));
   382  n = scatterwalk_clamp(&walk, len);
   384  p = scatterwalk_map(&walk);
   390  scatterwalk_advance(&walk, n);
   391  scatterwalk_done(&walk, 0, len);
   423  struct skcipher_walk walk;  (local in gcm_encrypt)
   437  err = skcipher_walk_aead_encrypt(&walk, req, false);
        [all …]
/Linux-v5.4/arch/s390/crypto/ |
D | des_s390.c |
    83  struct blkcipher_walk *walk)  (in ecb_desall_crypt)
    89  ret = blkcipher_walk_virt(desc, walk);
    90  while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
    93  cpacf_km(fc, ctx->key, walk->dst.virt.addr,
    94  walk->src.virt.addr, n);
    95  ret = blkcipher_walk_done(desc, walk, nbytes - n);
   101  struct blkcipher_walk *walk)  (in cbc_desall_crypt)
   111  ret = blkcipher_walk_virt(desc, walk);
   112  memcpy(param.iv, walk->iv, DES_BLOCK_SIZE);
   114  while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
        [all …]
D | paes_s390.c |
   162  struct blkcipher_walk *walk)  (in ecb_paes_crypt)
   168  ret = blkcipher_walk_virt(desc, walk);
   169  while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
   173  walk->dst.virt.addr, walk->src.virt.addr, n);
   175  ret = blkcipher_walk_done(desc, walk, nbytes - k);
   178  return blkcipher_walk_done(desc, walk, -EIO);
   188  struct blkcipher_walk walk;  (local in ecb_paes_encrypt)
   190  blkcipher_walk_init(&walk, dst, src, nbytes);
   191  return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
   198  struct blkcipher_walk walk;  (local in ecb_paes_decrypt)
        [all …]
/Linux-v5.4/arch/powerpc/crypto/ |
D | aes-spe-glue.c |
   185  struct blkcipher_walk walk;  (local in ppc_ecb_encrypt)
   190  blkcipher_walk_init(&walk, dst, src, nbytes);
   191  err = blkcipher_walk_virt(desc, &walk);
   193  while ((nbytes = walk.nbytes)) {
   199  ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,
   203  err = blkcipher_walk_done(desc, &walk, ubytes);
   213  struct blkcipher_walk walk;  (local in ppc_ecb_decrypt)
   218  blkcipher_walk_init(&walk, dst, src, nbytes);
   219  err = blkcipher_walk_virt(desc, &walk);
   221  while ((nbytes = walk.nbytes)) {
        [all …]
/Linux-v5.4/arch/sparc/crypto/ |
D | des_glue.c |
    95  struct blkcipher_walk walk;  (local in __ecb_crypt)
    98  blkcipher_walk_init(&walk, dst, src, nbytes);
    99  err = blkcipher_walk_virt(desc, &walk);
   106  while ((nbytes = walk.nbytes)) {
   110  des_sparc64_ecb_crypt((const u64 *)walk.src.virt.addr,
   111  (u64 *) walk.dst.virt.addr,
   115  err = blkcipher_walk_done(desc, &walk, nbytes);
   143  struct blkcipher_walk walk;  (local in cbc_encrypt)
   146  blkcipher_walk_init(&walk, dst, src, nbytes);
   147  err = blkcipher_walk_virt(desc, &walk);
        [all …]
D | aes_glue.c |
   221  struct blkcipher_walk walk;  (local in ecb_encrypt)
   224  blkcipher_walk_init(&walk, dst, src, nbytes);
   225  err = blkcipher_walk_virt(desc, &walk);
   229  while ((nbytes = walk.nbytes)) {
   234  (const u64 *)walk.src.virt.addr,
   235  (u64 *) walk.dst.virt.addr,
   239  err = blkcipher_walk_done(desc, &walk, nbytes);
   250  struct blkcipher_walk walk;  (local in ecb_decrypt)
   254  blkcipher_walk_init(&walk, dst, src, nbytes);
   255  err = blkcipher_walk_virt(desc, &walk);
        [all …]