
Searched refs:walk (Results 1 – 25 of 218) sorted by relevance


/Linux-v4.19/crypto/
blkcipher.c
40 struct blkcipher_walk *walk);
42 struct blkcipher_walk *walk);
44 static inline void blkcipher_map_src(struct blkcipher_walk *walk) in blkcipher_map_src() argument
46 walk->src.virt.addr = scatterwalk_map(&walk->in); in blkcipher_map_src()
49 static inline void blkcipher_map_dst(struct blkcipher_walk *walk) in blkcipher_map_dst() argument
51 walk->dst.virt.addr = scatterwalk_map(&walk->out); in blkcipher_map_dst()
54 static inline void blkcipher_unmap_src(struct blkcipher_walk *walk) in blkcipher_unmap_src() argument
56 scatterwalk_unmap(walk->src.virt.addr); in blkcipher_unmap_src()
59 static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk) in blkcipher_unmap_dst() argument
61 scatterwalk_unmap(walk->dst.virt.addr); in blkcipher_unmap_dst()
[all …]
skcipher.c
47 static int skcipher_walk_next(struct skcipher_walk *walk);
49 static inline void skcipher_unmap(struct scatter_walk *walk, void *vaddr) in skcipher_unmap() argument
51 if (PageHighMem(scatterwalk_page(walk))) in skcipher_unmap()
55 static inline void *skcipher_map(struct scatter_walk *walk) in skcipher_map() argument
57 struct page *page = scatterwalk_page(walk); in skcipher_map()
60 offset_in_page(walk->offset); in skcipher_map()
63 static inline void skcipher_map_src(struct skcipher_walk *walk) in skcipher_map_src() argument
65 walk->src.virt.addr = skcipher_map(&walk->in); in skcipher_map_src()
68 static inline void skcipher_map_dst(struct skcipher_walk *walk) in skcipher_map_dst() argument
70 walk->dst.virt.addr = skcipher_map(&walk->out); in skcipher_map_dst()
[all …]
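
Note: the skcipher.c hits above are the map/unmap internals of the modern walk API; the x86, arm and arm64 glue entries further down all consume it with the same loop. For orientation, here is a minimal sketch of that consumer pattern, not taken from the tree: my_ctx, my_cipher_blocks and MY_BLOCK_SIZE are hypothetical stand-ins.

#include <crypto/internal/skcipher.h>
#include <linux/string.h>

#define MY_BLOCK_SIZE 16			/* hypothetical block size */

struct my_ctx { u8 key[MY_BLOCK_SIZE]; };	/* hypothetical per-tfm state */

/* hypothetical primitive; a real driver would encrypt here */
static void my_cipher_blocks(struct my_ctx *ctx, u8 *dst, const u8 *src,
			     unsigned int nbytes)
{
	memcpy(dst, src, nbytes);		/* stand-in, not a real cipher */
}

static int my_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	/* maps the first src/dst span to walk.src/dst.virt.addr */
	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		/* process whole blocks; the walk carries any tail forward */
		unsigned int n = nbytes - (nbytes % MY_BLOCK_SIZE);

		my_cipher_blocks(ctx, walk.dst.virt.addr,
				 walk.src.virt.addr, n);

		/* report leftover bytes and advance to the next span */
		err = skcipher_walk_done(&walk, nbytes - n);
	}
	return err;
}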
ablkcipher.c
45 void __ablkcipher_walk_complete(struct ablkcipher_walk *walk) in __ablkcipher_walk_complete() argument
49 list_for_each_entry_safe(p, tmp, &walk->buffers, entry) { in __ablkcipher_walk_complete()
57 static inline void ablkcipher_queue_write(struct ablkcipher_walk *walk, in ablkcipher_queue_write() argument
60 p->dst = walk->out; in ablkcipher_queue_write()
61 list_add_tail(&p->entry, &walk->buffers); in ablkcipher_queue_write()
74 static inline void ablkcipher_done_slow(struct ablkcipher_walk *walk, in ablkcipher_done_slow() argument
78 unsigned int len_this_page = scatterwalk_pagelen(&walk->out); in ablkcipher_done_slow()
82 scatterwalk_advance(&walk->out, n); in ablkcipher_done_slow()
86 scatterwalk_start(&walk->out, sg_next(walk->out.sg)); in ablkcipher_done_slow()
90 static inline void ablkcipher_done_fast(struct ablkcipher_walk *walk, in ablkcipher_done_fast() argument
[all …]
cfb.c
53 static void crypto_cfb_final(struct skcipher_walk *walk, in crypto_cfb_final() argument
59 u8 *src = walk->src.virt.addr; in crypto_cfb_final()
60 u8 *dst = walk->dst.virt.addr; in crypto_cfb_final()
61 u8 *iv = walk->iv; in crypto_cfb_final()
62 unsigned int nbytes = walk->nbytes; in crypto_cfb_final()
68 static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk, in crypto_cfb_encrypt_segment() argument
72 unsigned int nbytes = walk->nbytes; in crypto_cfb_encrypt_segment()
73 u8 *src = walk->src.virt.addr; in crypto_cfb_encrypt_segment()
74 u8 *dst = walk->dst.virt.addr; in crypto_cfb_encrypt_segment()
75 u8 *iv = walk->iv; in crypto_cfb_encrypt_segment()
[all …]
ahash.c
45 static int hash_walk_next(struct crypto_hash_walk *walk) in hash_walk_next() argument
47 unsigned int alignmask = walk->alignmask; in hash_walk_next()
48 unsigned int offset = walk->offset; in hash_walk_next()
49 unsigned int nbytes = min(walk->entrylen, in hash_walk_next()
52 if (walk->flags & CRYPTO_ALG_ASYNC) in hash_walk_next()
53 walk->data = kmap(walk->pg); in hash_walk_next()
55 walk->data = kmap_atomic(walk->pg); in hash_walk_next()
56 walk->data += offset; in hash_walk_next()
65 walk->entrylen -= nbytes; in hash_walk_next()
69 static int hash_walk_new_entry(struct crypto_hash_walk *walk) in hash_walk_new_entry() argument
[all …]
pcbc.c
47 struct skcipher_walk *walk, in crypto_pcbc_encrypt_segment() argument
51 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_segment()
52 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_segment()
53 u8 *dst = walk->dst.virt.addr; in crypto_pcbc_encrypt_segment()
54 u8 *iv = walk->iv; in crypto_pcbc_encrypt_segment()
69 struct skcipher_walk *walk, in crypto_pcbc_encrypt_inplace() argument
73 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_inplace()
74 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_inplace()
75 u8 *iv = walk->iv; in crypto_pcbc_encrypt_inplace()
87 memcpy(walk->iv, iv, bsize); in crypto_pcbc_encrypt_inplace()
[all …]
/Linux-v4.19/mm/
pagewalk.c
8 struct mm_walk *walk) in walk_pte_range() argument
15 err = walk->pte_entry(pte, addr, addr + PAGE_SIZE, walk); in walk_pte_range()
29 struct mm_walk *walk) in walk_pmd_range() argument
39 if (pmd_none(*pmd) || !walk->vma) { in walk_pmd_range()
40 if (walk->pte_hole) in walk_pmd_range()
41 err = walk->pte_hole(addr, next, walk); in walk_pmd_range()
50 if (walk->pmd_entry) in walk_pmd_range()
51 err = walk->pmd_entry(pmd, addr, next, walk); in walk_pmd_range()
59 if (!walk->pte_entry) in walk_pmd_range()
62 split_huge_pmd(walk->vma, pmd, addr); in walk_pmd_range()
[all …]
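
Note: pagewalk.c implements the 4.19-era page-table walker, where the callbacks live directly in struct mm_walk (later kernels moved them into struct mm_walk_ops). A minimal sketch of a caller under that assumption; count_pte and count_present_ptes are invented names:

#include <linux/mm.h>

/* invoked for each pte in [start, end); walk->private carries caller state */
static int count_pte(pte_t *pte, unsigned long addr,
		     unsigned long next, struct mm_walk *walk)
{
	unsigned long *count = walk->private;

	if (pte_present(*pte))
		(*count)++;
	return 0;			/* non-zero would abort the walk */
}

static unsigned long count_present_ptes(struct mm_struct *mm,
					unsigned long start,
					unsigned long end)
{
	unsigned long count = 0;
	struct mm_walk walk = {
		.pte_entry = count_pte,
		.mm        = mm,
		.private   = &count,
	};

	down_read(&mm->mmap_sem);	/* the walker expects mmap_sem held */
	walk_page_range(start, end, &walk);
	up_read(&mm->mmap_sem);

	return count;
}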
/Linux-v4.19/include/crypto/
scatterwalk.h
33 static inline unsigned int scatterwalk_pagelen(struct scatter_walk *walk) in scatterwalk_pagelen() argument
35 unsigned int len = walk->sg->offset + walk->sg->length - walk->offset; in scatterwalk_pagelen()
36 unsigned int len_this_page = offset_in_page(~walk->offset) + 1; in scatterwalk_pagelen()
40 static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk, in scatterwalk_clamp() argument
43 unsigned int len_this_page = scatterwalk_pagelen(walk); in scatterwalk_clamp()
47 static inline void scatterwalk_advance(struct scatter_walk *walk, in scatterwalk_advance() argument
50 walk->offset += nbytes; in scatterwalk_advance()
53 static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk, in scatterwalk_aligned() argument
56 return !(walk->offset & alignmask); in scatterwalk_aligned()
59 static inline struct page *scatterwalk_page(struct scatter_walk *walk) in scatterwalk_page() argument
[all …]
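
Note: scatterwalk.h is the low-level scatterlist cursor that the CCM and GCM auth-mac loops below are built on. A hedged sketch of the traversal idiom, with my_consume() as an invented sink:

#include <crypto/scatterwalk.h>

/* hypothetical sink for the bytes being walked */
static void my_consume(const u8 *p, unsigned int n)
{
	(void)p;
	(void)n;
}

static void my_walk_sg(struct scatterlist *sg, unsigned int len)
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);

	while (len) {
		/* bytes available before the current page/entry boundary */
		unsigned int n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			/* entry exhausted: step to the next scatterlist entry */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		p = scatterwalk_map(&walk);
		my_consume(p, n);
		scatterwalk_unmap(p);

		len -= n;
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);	/* len != 0: more to come */
	}
}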
cbc.h
21 struct skcipher_walk *walk, struct crypto_skcipher *tfm, in crypto_cbc_encrypt_segment() argument
25 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_segment()
26 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_segment()
27 u8 *dst = walk->dst.virt.addr; in crypto_cbc_encrypt_segment()
28 u8 *iv = walk->iv; in crypto_cbc_encrypt_segment()
43 struct skcipher_walk *walk, struct crypto_skcipher *tfm, in crypto_cbc_encrypt_inplace() argument
47 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_inplace()
48 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_inplace()
49 u8 *iv = walk->iv; in crypto_cbc_encrypt_inplace()
59 memcpy(walk->iv, iv, bsize); in crypto_cbc_encrypt_inplace()
[all …]
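
Note: cbc.h shows the template split that pcbc.c above also follows: a _segment variant for distinct src/dst buffers and an _inplace variant that reuses the source buffer. The chaining step itself is compact; a simplified sketch of the segment case, with enc_block standing in for the single-block encrypt (the in-tree code threads the tfm through instead):

#include <crypto/algapi.h>	/* crypto_xor() */
#include <linux/string.h>

static void my_cbc_encrypt_segment(u8 *dst, const u8 *src, u8 *iv,
				   unsigned int nbytes, unsigned int bsize,
				   void (*enc_block)(u8 *out, const u8 *in))
{
	while (nbytes >= bsize) {
		crypto_xor(iv, src, bsize);	/* iv ^= plaintext block */
		enc_block(dst, iv);		/* dst = E_k(iv) */
		memcpy(iv, dst, bsize);		/* ciphertext chains forward */

		src += bsize;
		dst += bsize;
		nbytes -= bsize;
	}
}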
/Linux-v4.19/arch/x86/crypto/
glue_helper.c
40 struct skcipher_walk walk; in glue_ecb_req_128bit() local
45 err = skcipher_walk_virt(&walk, req, false); in glue_ecb_req_128bit()
47 while ((nbytes = walk.nbytes)) { in glue_ecb_req_128bit()
48 const u8 *src = walk.src.virt.addr; in glue_ecb_req_128bit()
49 u8 *dst = walk.dst.virt.addr; in glue_ecb_req_128bit()
54 &walk, fpu_enabled, nbytes); in glue_ecb_req_128bit()
72 err = skcipher_walk_done(&walk, nbytes); in glue_ecb_req_128bit()
85 struct skcipher_walk walk; in glue_cbc_encrypt_req_128bit() local
89 err = skcipher_walk_virt(&walk, req, false); in glue_cbc_encrypt_req_128bit()
91 while ((nbytes = walk.nbytes)) { in glue_cbc_encrypt_req_128bit()
[all …]
cast5_avx_glue.c
50 static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk, in cast5_fpu_begin() argument
54 walk, fpu_enabled, nbytes); in cast5_fpu_begin()
67 struct skcipher_walk walk; in ecb_crypt() local
73 err = skcipher_walk_virt(&walk, req, false); in ecb_crypt()
75 while ((nbytes = walk.nbytes)) { in ecb_crypt()
76 u8 *wsrc = walk.src.virt.addr; in ecb_crypt()
77 u8 *wdst = walk.dst.virt.addr; in ecb_crypt()
79 fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes); in ecb_crypt()
108 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
130 struct skcipher_walk walk; in cbc_encrypt() local
[all …]
blowfish_glue.c
93 struct skcipher_walk walk; in ecb_crypt() local
97 err = skcipher_walk_virt(&walk, req, false); in ecb_crypt()
99 while ((nbytes = walk.nbytes)) { in ecb_crypt()
100 u8 *wsrc = walk.src.virt.addr; in ecb_crypt()
101 u8 *wdst = walk.dst.virt.addr; in ecb_crypt()
127 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
144 struct skcipher_walk *walk) in __cbc_encrypt() argument
147 unsigned int nbytes = walk->nbytes; in __cbc_encrypt()
148 u64 *src = (u64 *)walk->src.virt.addr; in __cbc_encrypt()
149 u64 *dst = (u64 *)walk->dst.virt.addr; in __cbc_encrypt()
[all …]
des3_ede_glue.c
89 struct skcipher_walk walk; in ecb_crypt() local
93 err = skcipher_walk_virt(&walk, req, false); in ecb_crypt()
95 while ((nbytes = walk.nbytes)) { in ecb_crypt()
96 u8 *wsrc = walk.src.virt.addr; in ecb_crypt()
97 u8 *wdst = walk.dst.virt.addr; in ecb_crypt()
124 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
147 struct skcipher_walk *walk) in __cbc_encrypt() argument
150 unsigned int nbytes = walk->nbytes; in __cbc_encrypt()
151 u64 *src = (u64 *)walk->src.virt.addr; in __cbc_encrypt()
152 u64 *dst = (u64 *)walk->dst.virt.addr; in __cbc_encrypt()
[all …]
/Linux-v4.19/arch/arm/crypto/
aes-ce-glue.c
181 struct skcipher_walk walk; in ecb_encrypt() local
185 err = skcipher_walk_virt(&walk, req, true); in ecb_encrypt()
188 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
189 ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
191 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
201 struct skcipher_walk walk; in ecb_decrypt() local
205 err = skcipher_walk_virt(&walk, req, true); in ecb_decrypt()
208 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
209 ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
211 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
[all …]
aes-neonbs-glue.c
86 struct skcipher_walk walk; in __ecb_crypt() local
89 err = skcipher_walk_virt(&walk, req, true); in __ecb_crypt()
92 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
93 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
95 if (walk.nbytes < walk.total) in __ecb_crypt()
97 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
99 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
101 err = skcipher_walk_done(&walk, in __ecb_crypt()
102 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
155 struct skcipher_walk walk; in cbc_decrypt() local
[all …]
/Linux-v4.19/arch/arm64/crypto/
aes-neonbs-glue.c
99 struct skcipher_walk walk; in __ecb_crypt() local
102 err = skcipher_walk_virt(&walk, req, false); in __ecb_crypt()
104 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
105 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
107 if (walk.nbytes < walk.total) in __ecb_crypt()
109 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
112 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
115 err = skcipher_walk_done(&walk, in __ecb_crypt()
116 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
158 struct skcipher_walk walk; in cbc_encrypt() local
[all …]
aes-ce-ccm-glue.c
153 struct scatter_walk walk; in ccm_calculate_auth_mac() local
168 scatterwalk_start(&walk, req->src); in ccm_calculate_auth_mac()
171 u32 n = scatterwalk_clamp(&walk, len); in ccm_calculate_auth_mac()
175 scatterwalk_start(&walk, sg_next(walk.sg)); in ccm_calculate_auth_mac()
176 n = scatterwalk_clamp(&walk, len); in ccm_calculate_auth_mac()
178 p = scatterwalk_map(&walk); in ccm_calculate_auth_mac()
183 scatterwalk_advance(&walk, n); in ccm_calculate_auth_mac()
184 scatterwalk_done(&walk, 0, len); in ccm_calculate_auth_mac()
188 static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[], in ccm_crypt_fallback() argument
194 while (walk->nbytes) { in ccm_crypt_fallback()
[all …]
ghash-ce-glue.c
322 struct scatter_walk walk; in gcm_calculate_auth_mac() local
326 scatterwalk_start(&walk, req->src); in gcm_calculate_auth_mac()
329 u32 n = scatterwalk_clamp(&walk, len); in gcm_calculate_auth_mac()
333 scatterwalk_start(&walk, sg_next(walk.sg)); in gcm_calculate_auth_mac()
334 n = scatterwalk_clamp(&walk, len); in gcm_calculate_auth_mac()
336 p = scatterwalk_map(&walk); in gcm_calculate_auth_mac()
342 scatterwalk_advance(&walk, n); in gcm_calculate_auth_mac()
343 scatterwalk_done(&walk, 0, len); in gcm_calculate_auth_mac()
373 struct skcipher_walk walk; in gcm_encrypt() local
387 err = skcipher_walk_aead_encrypt(&walk, req, false); in gcm_encrypt()
[all …]
aes-glue.c
137 struct skcipher_walk walk; in ecb_encrypt() local
140 err = skcipher_walk_virt(&walk, req, false); in ecb_encrypt()
142 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
144 aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
147 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
157 struct skcipher_walk walk; in ecb_decrypt() local
160 err = skcipher_walk_virt(&walk, req, false); in ecb_decrypt()
162 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
164 aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
167 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
[all …]
aes-ctr-fallback.h
20 struct skcipher_walk walk; in aes_ctr_encrypt_fallback() local
24 err = skcipher_walk_virt(&walk, req, true); in aes_ctr_encrypt_fallback()
26 while (walk.nbytes > 0) { in aes_ctr_encrypt_fallback()
27 u8 *dst = walk.dst.virt.addr; in aes_ctr_encrypt_fallback()
28 u8 *src = walk.src.virt.addr; in aes_ctr_encrypt_fallback()
29 int nbytes = walk.nbytes; in aes_ctr_encrypt_fallback()
32 if (nbytes < walk.total) { in aes_ctr_encrypt_fallback()
34 tail = walk.nbytes % AES_BLOCK_SIZE; in aes_ctr_encrypt_fallback()
40 __aes_arm64_encrypt(ctx->key_enc, buf, walk.iv, in aes_ctr_encrypt_fallback()
43 crypto_inc(walk.iv, AES_BLOCK_SIZE); in aes_ctr_encrypt_fallback()
[all …]
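
Note: aes-ctr-fallback.h is the one entry here that must handle a partial final block, since CTR is a stream mode: it encrypts the counter, not the data, so the last keystream block may be used only in part. A rough sketch of that idea, with my_encrypt_block as an invented single-block primitive:

#include <crypto/algapi.h>	/* crypto_inc(), crypto_xor_cpy() */
#include <linux/kernel.h>	/* min_t() */
#include <linux/string.h>

/* hypothetical primitive; a real driver would run AES here */
static void my_encrypt_block(u8 *dst, const u8 *src)
{
	memcpy(dst, src, 16);	/* stand-in, not a real cipher */
}

static void my_ctr_crypt(u8 *dst, const u8 *src, unsigned int nbytes,
			 u8 ctr[16])
{
	u8 ks[16];

	while (nbytes) {
		unsigned int n = min_t(unsigned int, nbytes, 16);

		my_encrypt_block(ks, ctr);	 /* keystream = E_k(counter) */
		crypto_xor_cpy(dst, src, ks, n); /* dst = src ^ keystream */
		crypto_inc(ctr, 16);		 /* bump big-endian counter */

		src += n;
		dst += n;
		nbytes -= n;
	}
}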
/Linux-v4.19/arch/s390/crypto/
des_s390.c
85 struct blkcipher_walk *walk) in ecb_desall_crypt() argument
91 ret = blkcipher_walk_virt(desc, walk); in ecb_desall_crypt()
92 while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { in ecb_desall_crypt()
95 cpacf_km(fc, ctx->key, walk->dst.virt.addr, in ecb_desall_crypt()
96 walk->src.virt.addr, n); in ecb_desall_crypt()
97 ret = blkcipher_walk_done(desc, walk, nbytes - n); in ecb_desall_crypt()
103 struct blkcipher_walk *walk) in cbc_desall_crypt() argument
113 ret = blkcipher_walk_virt(desc, walk); in cbc_desall_crypt()
114 memcpy(param.iv, walk->iv, DES_BLOCK_SIZE); in cbc_desall_crypt()
116 while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { in cbc_desall_crypt()
[all …]
paes_s390.c
96 struct blkcipher_walk *walk) in ecb_paes_crypt() argument
102 ret = blkcipher_walk_virt(desc, walk); in ecb_paes_crypt()
103 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ecb_paes_crypt()
107 walk->dst.virt.addr, walk->src.virt.addr, n); in ecb_paes_crypt()
109 ret = blkcipher_walk_done(desc, walk, nbytes - k); in ecb_paes_crypt()
112 return blkcipher_walk_done(desc, walk, -EIO); in ecb_paes_crypt()
122 struct blkcipher_walk walk; in ecb_paes_encrypt() local
124 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_paes_encrypt()
125 return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk); in ecb_paes_encrypt()
132 struct blkcipher_walk walk; in ecb_paes_decrypt() local
[all …]
/Linux-v4.19/arch/powerpc/crypto/
aes-spe-glue.c
190 struct blkcipher_walk walk; in ppc_ecb_encrypt() local
195 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_encrypt()
196 err = blkcipher_walk_virt(desc, &walk); in ppc_ecb_encrypt()
198 while ((nbytes = walk.nbytes)) { in ppc_ecb_encrypt()
204 ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, in ppc_ecb_encrypt()
208 err = blkcipher_walk_done(desc, &walk, ubytes); in ppc_ecb_encrypt()
218 struct blkcipher_walk walk; in ppc_ecb_decrypt() local
223 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_decrypt()
224 err = blkcipher_walk_virt(desc, &walk); in ppc_ecb_decrypt()
226 while ((nbytes = walk.nbytes)) { in ppc_ecb_decrypt()
[all …]
/Linux-v4.19/arch/sparc/crypto/
aes_glue.c
220 struct blkcipher_walk walk; in ecb_encrypt() local
223 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
224 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
228 while ((nbytes = walk.nbytes)) { in ecb_encrypt()
233 (const u64 *)walk.src.virt.addr, in ecb_encrypt()
234 (u64 *) walk.dst.virt.addr, in ecb_encrypt()
238 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
249 struct blkcipher_walk walk; in ecb_decrypt() local
253 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
254 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
[all …]
des_glue.c
98 struct blkcipher_walk walk; in __ecb_crypt() local
101 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
102 err = blkcipher_walk_virt(desc, &walk); in __ecb_crypt()
109 while ((nbytes = walk.nbytes)) { in __ecb_crypt()
113 des_sparc64_ecb_crypt((const u64 *)walk.src.virt.addr, in __ecb_crypt()
114 (u64 *) walk.dst.virt.addr, in __ecb_crypt()
118 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
146 struct blkcipher_walk walk; in cbc_encrypt() local
149 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_encrypt()
150 err = blkcipher_walk_virt(desc, &walk); in cbc_encrypt()
[all …]
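
Note: the s390, powerpc and sparc entries all still use the older blkcipher_walk API that the skcipher walk above superseded; the loop has the same shape, but the caller seeds the walk explicitly with blkcipher_walk_init(). A hedged sketch of that legacy idiom, with my_crypt_blocks and MY_BLOCK_SIZE as invented placeholders:

#include <crypto/algapi.h>
#include <linux/string.h>

#define MY_BLOCK_SIZE 8		/* hypothetical block size */

/* hypothetical primitive; a real driver would encrypt here */
static void my_crypt_blocks(struct crypto_blkcipher *tfm, u8 *dst,
			    const u8 *src, unsigned int nbytes)
{
	memcpy(dst, src, nbytes);	/* stand-in, not a real cipher */
}

static int my_ecb_crypt(struct blkcipher_desc *desc,
			struct scatterlist *dst, struct scatterlist *src,
			unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	/* unlike skcipher_walk_virt(), the caller initializes the walk */
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes) >= MY_BLOCK_SIZE) {
		unsigned int n = nbytes & ~(MY_BLOCK_SIZE - 1);

		my_crypt_blocks(desc->tfm, walk.dst.virt.addr,
				walk.src.virt.addr, n);

		/* hand back the unprocessed tail */
		err = blkcipher_walk_done(desc, &walk, nbytes - n);
	}
	return err;
}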
