Home
last modified time | relevance | path

Searched refs:nbytes (Results 1 – 25 of 409) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9 10 >> … 17

/Linux-v4.19/arch/x86/crypto/
Dglue_helper.c42 unsigned int nbytes; in glue_ecb_req_128bit() local
47 while ((nbytes = walk.nbytes)) { in glue_ecb_req_128bit()
54 &walk, fpu_enabled, nbytes); in glue_ecb_req_128bit()
58 if (nbytes < func_bytes) in glue_ecb_req_128bit()
66 nbytes -= func_bytes; in glue_ecb_req_128bit()
67 } while (nbytes >= func_bytes); in glue_ecb_req_128bit()
69 if (nbytes < bsize) in glue_ecb_req_128bit()
72 err = skcipher_walk_done(&walk, nbytes); in glue_ecb_req_128bit()
86 unsigned int nbytes; in glue_cbc_encrypt_req_128bit() local
91 while ((nbytes = walk.nbytes)) { in glue_cbc_encrypt_req_128bit()
[all …]
Dcast5_avx_glue.c51 unsigned int nbytes) in cast5_fpu_begin() argument
54 walk, fpu_enabled, nbytes); in cast5_fpu_begin()
69 unsigned int nbytes; in ecb_crypt() local
75 while ((nbytes = walk.nbytes)) { in ecb_crypt()
79 fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes); in ecb_crypt()
82 if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { in ecb_crypt()
89 nbytes -= bsize * CAST5_PARALLEL_BLOCKS; in ecb_crypt()
90 } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); in ecb_crypt()
92 if (nbytes < bsize) in ecb_crypt()
104 nbytes -= bsize; in ecb_crypt()
[all …]
Dblowfish_glue.c94 unsigned int nbytes; in ecb_crypt() local
99 while ((nbytes = walk.nbytes)) { in ecb_crypt()
104 if (nbytes >= bsize * 4) { in ecb_crypt()
110 nbytes -= bsize * 4; in ecb_crypt()
111 } while (nbytes >= bsize * 4); in ecb_crypt()
113 if (nbytes < bsize) in ecb_crypt()
123 nbytes -= bsize; in ecb_crypt()
124 } while (nbytes >= bsize); in ecb_crypt()
127 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
147 unsigned int nbytes = walk->nbytes; in __cbc_encrypt() local
[all …]
Ddes3_ede_glue.c90 unsigned int nbytes; in ecb_crypt() local
95 while ((nbytes = walk.nbytes)) { in ecb_crypt()
100 if (nbytes >= bsize * 3) { in ecb_crypt()
107 nbytes -= bsize * 3; in ecb_crypt()
108 } while (nbytes >= bsize * 3); in ecb_crypt()
110 if (nbytes < bsize) in ecb_crypt()
120 nbytes -= bsize; in ecb_crypt()
121 } while (nbytes >= bsize); in ecb_crypt()
124 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
150 unsigned int nbytes = walk->nbytes; in __cbc_encrypt() local
[all …]
/Linux-v4.19/arch/powerpc/crypto/
Daes-spe-glue.c187 struct scatterlist *src, unsigned int nbytes) in ppc_ecb_encrypt() argument
195 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_encrypt()
198 while ((nbytes = walk.nbytes)) { in ppc_ecb_encrypt()
199 ubytes = nbytes > MAX_BYTES ? in ppc_ecb_encrypt()
200 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_ecb_encrypt()
201 nbytes -= ubytes; in ppc_ecb_encrypt()
205 ctx->key_enc, ctx->rounds, nbytes); in ppc_ecb_encrypt()
215 struct scatterlist *src, unsigned int nbytes) in ppc_ecb_decrypt() argument
223 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_decrypt()
226 while ((nbytes = walk.nbytes)) { in ppc_ecb_decrypt()
[all …]
/Linux-v4.19/lib/mpi/
Dmpicoder.c35 MPI mpi_read_raw_data(const void *xbuffer, size_t nbytes) in mpi_read_raw_data() argument
43 while (nbytes > 0 && buffer[0] == 0) { in mpi_read_raw_data()
45 nbytes--; in mpi_read_raw_data()
48 nbits = nbytes * 8; in mpi_read_raw_data()
53 if (nbytes > 0) in mpi_read_raw_data()
56 nlimbs = DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB); in mpi_read_raw_data()
64 if (nbytes > 0) { in mpi_read_raw_data()
65 i = BYTES_PER_MPI_LIMB - nbytes % BYTES_PER_MPI_LIMB; in mpi_read_raw_data()
84 unsigned int nbits, nbytes; in mpi_read_from_buffer() local
96 nbytes = DIV_ROUND_UP(nbits, 8); in mpi_read_from_buffer()
[all …]
/Linux-v4.19/arch/sparc/crypto/
Ddes_glue.c95 unsigned int nbytes, bool encrypt) in __ecb_crypt() argument
101 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
109 while ((nbytes = walk.nbytes)) { in __ecb_crypt()
110 unsigned int block_len = nbytes & DES_BLOCK_MASK; in __ecb_crypt()
117 nbytes &= DES_BLOCK_SIZE - 1; in __ecb_crypt()
118 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
126 unsigned int nbytes) in ecb_encrypt() argument
128 return __ecb_crypt(desc, dst, src, nbytes, true); in ecb_encrypt()
133 unsigned int nbytes) in ecb_decrypt() argument
135 return __ecb_crypt(desc, dst, src, nbytes, false); in ecb_decrypt()
[all …]
Dcamellia_glue.c87 unsigned int nbytes, bool encrypt) in __ecb_crypt() argument
99 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
108 while ((nbytes = walk.nbytes)) { in __ecb_crypt()
109 unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK; in __ecb_crypt()
119 nbytes &= CAMELLIA_BLOCK_SIZE - 1; in __ecb_crypt()
120 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
128 unsigned int nbytes) in ecb_encrypt() argument
130 return __ecb_crypt(desc, dst, src, nbytes, true); in ecb_encrypt()
135 unsigned int nbytes) in ecb_decrypt() argument
137 return __ecb_crypt(desc, dst, src, nbytes, false); in ecb_decrypt()
[all …]
Daes_glue.c217 unsigned int nbytes) in ecb_encrypt() argument
223 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
228 while ((nbytes = walk.nbytes)) { in ecb_encrypt()
229 unsigned int block_len = nbytes & AES_BLOCK_MASK; in ecb_encrypt()
237 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
238 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
246 unsigned int nbytes) in ecb_decrypt() argument
253 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
259 while ((nbytes = walk.nbytes)) { in ecb_decrypt()
260 unsigned int block_len = nbytes & AES_BLOCK_MASK; in ecb_decrypt()
[all …]
/Linux-v4.19/include/crypto/
Dcbc.h25 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_segment() local
37 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_segment()
39 return nbytes; in crypto_cbc_encrypt_segment()
47 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_inplace() local
57 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_inplace()
61 return nbytes; in crypto_cbc_encrypt_inplace()
74 while (walk.nbytes) { in crypto_cbc_encrypt_walk()
90 unsigned int nbytes = walk->nbytes; in crypto_cbc_decrypt_segment() local
102 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_decrypt_segment()
106 return nbytes; in crypto_cbc_decrypt_segment()
[all …]
/Linux-v4.19/drivers/staging/comedi/
Dcomedi_buf.c291 unsigned int nbytes) in comedi_buf_write_alloc() argument
296 if (nbytes > unalloc) in comedi_buf_write_alloc()
297 nbytes = unalloc; in comedi_buf_write_alloc()
299 async->buf_write_alloc_count += nbytes; in comedi_buf_write_alloc()
307 return nbytes; in comedi_buf_write_alloc()
381 unsigned int nbytes) in comedi_buf_write_free() argument
386 if (nbytes > allocated) in comedi_buf_write_free()
387 nbytes = allocated; in comedi_buf_write_free()
389 async->buf_write_count += nbytes; in comedi_buf_write_free()
390 async->buf_write_ptr += nbytes; in comedi_buf_write_free()
[all …]
/Linux-v4.19/arch/s390/crypto/
Ddes_s390.c88 unsigned int nbytes, n; in ecb_desall_crypt() local
92 while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { in ecb_desall_crypt()
94 n = nbytes & ~(DES_BLOCK_SIZE - 1); in ecb_desall_crypt()
97 ret = blkcipher_walk_done(desc, walk, nbytes - n); in ecb_desall_crypt()
106 unsigned int nbytes, n; in cbc_desall_crypt() local
116 while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { in cbc_desall_crypt()
118 n = nbytes & ~(DES_BLOCK_SIZE - 1); in cbc_desall_crypt()
121 ret = blkcipher_walk_done(desc, walk, nbytes - n); in cbc_desall_crypt()
129 unsigned int nbytes) in ecb_des_encrypt() argument
133 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_des_encrypt()
[all …]
Dpaes_s390.c99 unsigned int nbytes, n, k; in ecb_paes_crypt() local
103 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ecb_paes_crypt()
105 n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_paes_crypt()
109 ret = blkcipher_walk_done(desc, walk, nbytes - k); in ecb_paes_crypt()
120 unsigned int nbytes) in ecb_paes_encrypt() argument
124 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_paes_encrypt()
130 unsigned int nbytes) in ecb_paes_decrypt() argument
134 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_paes_decrypt()
194 unsigned int nbytes, n, k; in cbc_paes_crypt() local
204 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in cbc_paes_crypt()
[all …]
/Linux-v4.19/crypto/
Dpcbc.c51 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_segment() local
63 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_segment()
65 return nbytes; in crypto_pcbc_encrypt_segment()
73 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_inplace() local
85 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_inplace()
89 return nbytes; in crypto_pcbc_encrypt_inplace()
98 unsigned int nbytes; in crypto_pcbc_encrypt() local
103 while ((nbytes = walk.nbytes)) { in crypto_pcbc_encrypt()
105 nbytes = crypto_pcbc_encrypt_inplace(req, &walk, in crypto_pcbc_encrypt()
108 nbytes = crypto_pcbc_encrypt_segment(req, &walk, in crypto_pcbc_encrypt()
[all …]
Dscatterwalk.c23 static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out) in memcpy_dir() argument
28 memcpy(dst, src, nbytes); in memcpy_dir()
32 size_t nbytes, int out) in scatterwalk_copychunks() argument
38 if (len_this_page > nbytes) in scatterwalk_copychunks()
39 len_this_page = nbytes; in scatterwalk_copychunks()
49 if (nbytes == len_this_page) in scatterwalk_copychunks()
53 nbytes -= len_this_page; in scatterwalk_copychunks()
61 unsigned int start, unsigned int nbytes, int out) in scatterwalk_map_and_copy() argument
66 if (!nbytes) in scatterwalk_map_and_copy()
72 scatterwalk_copychunks(buf, &walk, nbytes, out); in scatterwalk_map_and_copy()
/Linux-v4.19/arch/arm64/crypto/
Daes-ctr-fallback.h26 while (walk.nbytes > 0) { in aes_ctr_encrypt_fallback()
29 int nbytes = walk.nbytes; in aes_ctr_encrypt_fallback() local
32 if (nbytes < walk.total) { in aes_ctr_encrypt_fallback()
33 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in aes_ctr_encrypt_fallback()
34 tail = walk.nbytes % AES_BLOCK_SIZE; in aes_ctr_encrypt_fallback()
38 int bsize = min(nbytes, AES_BLOCK_SIZE); in aes_ctr_encrypt_fallback()
47 nbytes -= AES_BLOCK_SIZE; in aes_ctr_encrypt_fallback()
48 } while (nbytes > 0); in aes_ctr_encrypt_fallback()
Dspeck-neon-glue.c32 unsigned int nbytes, void *tweak);
36 unsigned int nbytes, void *tweak);
58 while (walk.nbytes > 0) { in __speck128_xts_crypt()
59 unsigned int nbytes = walk.nbytes; in __speck128_xts_crypt() local
63 if (nbytes >= SPECK_NEON_CHUNK_SIZE && may_use_simd()) { in __speck128_xts_crypt()
66 count = round_down(nbytes, SPECK_NEON_CHUNK_SIZE); in __speck128_xts_crypt()
74 nbytes -= count; in __speck128_xts_crypt()
78 while (nbytes >= sizeof(tweak)) { in __speck128_xts_crypt()
86 nbytes -= sizeof(tweak); in __speck128_xts_crypt()
88 err = skcipher_walk_done(&walk, nbytes); in __speck128_xts_crypt()
[all …]
/Linux-v4.19/arch/arm/crypto/
Dspeck-neon-glue.c38 unsigned int nbytes, void *tweak);
42 unsigned int nbytes, void *tweak);
64 while (walk.nbytes > 0) { in __speck128_xts_crypt()
65 unsigned int nbytes = walk.nbytes; in __speck128_xts_crypt() local
69 if (nbytes >= SPECK_NEON_CHUNK_SIZE && may_use_simd()) { in __speck128_xts_crypt()
72 count = round_down(nbytes, SPECK_NEON_CHUNK_SIZE); in __speck128_xts_crypt()
80 nbytes -= count; in __speck128_xts_crypt()
84 while (nbytes >= sizeof(tweak)) { in __speck128_xts_crypt()
92 nbytes -= sizeof(tweak); in __speck128_xts_crypt()
94 err = skcipher_walk_done(&walk, nbytes); in __speck128_xts_crypt()
[all …]
/Linux-v4.19/drivers/crypto/
Dgeode-aes.c186 unsigned int nbytes) in fallback_blk_dec() argument
195 ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes); in fallback_blk_dec()
202 unsigned int nbytes) in fallback_blk_enc() argument
211 ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes); in fallback_blk_enc()
308 unsigned int nbytes) in geode_cbc_decrypt() argument
315 return fallback_blk_dec(desc, dst, src, nbytes); in geode_cbc_decrypt()
317 blkcipher_walk_init(&walk, dst, src, nbytes); in geode_cbc_decrypt()
321 while ((nbytes = walk.nbytes)) { in geode_cbc_decrypt()
325 op->len = nbytes - (nbytes % AES_BLOCK_SIZE); in geode_cbc_decrypt()
330 nbytes -= ret; in geode_cbc_decrypt()
[all …]
/Linux-v4.19/drivers/crypto/vmx/
Daes_cbc.c95 struct scatterlist *src, unsigned int nbytes) in p8_aes_cbc_encrypt() argument
106 skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); in p8_aes_cbc_encrypt()
110 blkcipher_walk_init(&walk, dst, src, nbytes); in p8_aes_cbc_encrypt()
112 while ((nbytes = walk.nbytes)) { in p8_aes_cbc_encrypt()
118 nbytes & AES_BLOCK_MASK, in p8_aes_cbc_encrypt()
124 nbytes &= AES_BLOCK_SIZE - 1; in p8_aes_cbc_encrypt()
125 ret = blkcipher_walk_done(desc, &walk, nbytes); in p8_aes_cbc_encrypt()
134 struct scatterlist *src, unsigned int nbytes) in p8_aes_cbc_decrypt() argument
145 skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); in p8_aes_cbc_decrypt()
149 blkcipher_walk_init(&walk, dst, src, nbytes); in p8_aes_cbc_decrypt()
[all …]
/Linux-v4.19/drivers/infiniband/hw/hfi1/
Dpio_copy.c226 unsigned int nbytes) in read_low_bytes() argument
229 jcopy(&pbuf->carry.val8[0], from, nbytes); in read_low_bytes()
230 pbuf->carry_bytes = nbytes; in read_low_bytes()
242 const void *from, unsigned int nbytes) in read_extra_bytes() argument
244 jcopy(&pbuf->carry.val8[pbuf->carry_bytes], from, nbytes); in read_extra_bytes()
245 pbuf->carry_bytes += nbytes; in read_extra_bytes()
305 const void *from, size_t nbytes) in seg_pio_copy_start() argument
315 dend = dest + ((nbytes >> 3) * sizeof(u64)); in seg_pio_copy_start()
380 read_low_bytes(pbuf, from, nbytes & 0x7); in seg_pio_copy_start()
382 pbuf->qw_written = 1 /*PBC*/ + (nbytes >> 3); in seg_pio_copy_start()
[all …]
/Linux-v4.19/tools/objtool/arch/x86/lib/
Dinsn.c116 prefixes->nbytes++; in insn_get_prefixes()
140 insn->rex_prefix.nbytes = 1; in insn_get_prefixes()
170 insn->vex_prefix.nbytes = 4; in insn_get_prefixes()
178 insn->vex_prefix.nbytes = 3; in insn_get_prefixes()
190 insn->vex_prefix.nbytes = 2; in insn_get_prefixes()
226 opcode->nbytes = 1; in insn_get_opcode()
245 opcode->bytes[opcode->nbytes++] = op; in insn_get_opcode()
278 modrm->nbytes = 1; in insn_get_modrm()
316 return (modrm->nbytes && (modrm->value & 0xc7) == 0x5); in insn_rip_relative()
334 if (insn->modrm.nbytes) { in insn_get_sib()
[all …]
/Linux-v4.19/tools/perf/util/intel-pt-decoder/
Dinsn.c116 prefixes->nbytes++; in insn_get_prefixes()
140 insn->rex_prefix.nbytes = 1; in insn_get_prefixes()
170 insn->vex_prefix.nbytes = 4; in insn_get_prefixes()
178 insn->vex_prefix.nbytes = 3; in insn_get_prefixes()
190 insn->vex_prefix.nbytes = 2; in insn_get_prefixes()
226 opcode->nbytes = 1; in insn_get_opcode()
245 opcode->bytes[opcode->nbytes++] = op; in insn_get_opcode()
278 modrm->nbytes = 1; in insn_get_modrm()
316 return (modrm->nbytes && (modrm->value & 0xc7) == 0x5); in insn_rip_relative()
334 if (insn->modrm.nbytes) { in insn_get_sib()
[all …]
/Linux-v4.19/arch/x86/lib/
Dinsn.c116 prefixes->nbytes++; in insn_get_prefixes()
140 insn->rex_prefix.nbytes = 1; in insn_get_prefixes()
170 insn->vex_prefix.nbytes = 4; in insn_get_prefixes()
178 insn->vex_prefix.nbytes = 3; in insn_get_prefixes()
190 insn->vex_prefix.nbytes = 2; in insn_get_prefixes()
226 opcode->nbytes = 1; in insn_get_opcode()
245 opcode->bytes[opcode->nbytes++] = op; in insn_get_opcode()
278 modrm->nbytes = 1; in insn_get_modrm()
316 return (modrm->nbytes && (modrm->value & 0xc7) == 0x5); in insn_rip_relative()
334 if (insn->modrm.nbytes) { in insn_get_sib()
[all …]
/Linux-v4.19/include/trace/events/
Drandom.h197 TP_PROTO(int nbytes, unsigned long IP),
199 TP_ARGS(nbytes, IP),
202 __field( int, nbytes )
207 __entry->nbytes = nbytes;
211 TP_printk("nbytes %d caller %pS", __entry->nbytes, (void *)__entry->IP)
215 TP_PROTO(int nbytes, unsigned long IP),
217 TP_ARGS(nbytes, IP)
221 TP_PROTO(int nbytes, unsigned long IP),
223 TP_ARGS(nbytes, IP)
227 TP_PROTO(const char *pool_name, int nbytes, int entropy_count,
[all …]

Pages: 1 2 3 4 5 6 7 8 9 10 >> … 17