/Linux-v5.4/arch/x86/crypto/
D | cast5_avx_glue.c |
    36   unsigned int nbytes) in cast5_fpu_begin() argument
    39   walk, fpu_enabled, nbytes); in cast5_fpu_begin()
    54   unsigned int nbytes; in ecb_crypt() local
    60   while ((nbytes = walk.nbytes)) { in ecb_crypt()
    64   fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes); in ecb_crypt()
    67   if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { in ecb_crypt()
    74   nbytes -= bsize * CAST5_PARALLEL_BLOCKS; in ecb_crypt()
    75   } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); in ecb_crypt()
    77   if (nbytes < bsize) in ecb_crypt()
    89   nbytes -= bsize; in ecb_crypt()
    [all …]
D | glue_helper.c |
    28   unsigned int nbytes; in glue_ecb_req_128bit() local
    33   while ((nbytes = walk.nbytes)) { in glue_ecb_req_128bit()
    40   &walk, fpu_enabled, nbytes); in glue_ecb_req_128bit()
    44   if (nbytes < func_bytes) in glue_ecb_req_128bit()
    52   nbytes -= func_bytes; in glue_ecb_req_128bit()
    53   } while (nbytes >= func_bytes); in glue_ecb_req_128bit()
    55   if (nbytes < bsize) in glue_ecb_req_128bit()
    58   err = skcipher_walk_done(&walk, nbytes); in glue_ecb_req_128bit()
    72   unsigned int nbytes; in glue_cbc_encrypt_req_128bit() local
    77   while ((nbytes = walk.nbytes)) { in glue_cbc_encrypt_req_128bit()
    [all …]
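The x86 glue entries above share one shape: each walk step exposes walk.nbytes, the code consumes as many parallel-width chunks and then single blocks as it can, and the remainder (always less than a block) is handed back through skcipher_walk_done(). The sketch below is a plain user-space model of only that byte accounting; the struct, the constants, and toy_ecb_step() are invented for illustration and no kernel API is used.

    #include <stdio.h>

    /* Toy stand-in for the kernel walk state; all names are illustrative. */
    struct toy_walk {
        unsigned int nbytes;            /* bytes mapped for this walk step */
    };

    #define BSIZE       16U             /* one cipher block */
    #define FUNC_BYTES  (8U * BSIZE)    /* widest "parallel" helper */

    /* Models the inner loop of glue_ecb_req_128bit(): consume whole
     * FUNC_BYTES chunks first, then single blocks, and return how many
     * bytes are left for skcipher_walk_done() to carry forward. */
    static unsigned int toy_ecb_step(struct toy_walk *walk)
    {
        unsigned int nbytes = walk->nbytes;

        if (nbytes >= FUNC_BYTES) {
            do {
                /* a real implementation would encrypt FUNC_BYTES here */
                nbytes -= FUNC_BYTES;
            } while (nbytes >= FUNC_BYTES);
        }
        while (nbytes >= BSIZE) {
            /* ...and one block at a time here */
            nbytes -= BSIZE;
        }
        return nbytes;                  /* partial tail, < BSIZE */
    }

    int main(void)
    {
        struct toy_walk w = { .nbytes = 1000 };

        printf("leftover = %u\n", toy_ecb_step(&w));    /* prints 8 */
        return 0;
    }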
D | blowfish_glue.c |
    79   unsigned int nbytes; in ecb_crypt() local
    84   while ((nbytes = walk.nbytes)) { in ecb_crypt()
    89   if (nbytes >= bsize * 4) { in ecb_crypt()
    95   nbytes -= bsize * 4; in ecb_crypt()
    96   } while (nbytes >= bsize * 4); in ecb_crypt()
    98   if (nbytes < bsize) in ecb_crypt()
    108  nbytes -= bsize; in ecb_crypt()
    109  } while (nbytes >= bsize); in ecb_crypt()
    112  err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
    132  unsigned int nbytes = walk->nbytes; in __cbc_encrypt() local
    [all …]
D | des3_ede_glue.c |
    80   unsigned int nbytes; in ecb_crypt() local
    85   while ((nbytes = walk.nbytes)) { in ecb_crypt()
    90   if (nbytes >= bsize * 3) { in ecb_crypt()
    97   nbytes -= bsize * 3; in ecb_crypt()
    98   } while (nbytes >= bsize * 3); in ecb_crypt()
    100  if (nbytes < bsize) in ecb_crypt()
    110  nbytes -= bsize; in ecb_crypt()
    111  } while (nbytes >= bsize); in ecb_crypt()
    114  err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
    140  unsigned int nbytes = walk->nbytes; in __cbc_encrypt() local
    [all …]
/Linux-v5.4/arch/powerpc/crypto/
D | aes-spe-glue.c |
    182  struct scatterlist *src, unsigned int nbytes) in ppc_ecb_encrypt() argument
    190  blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_encrypt()
    193  while ((nbytes = walk.nbytes)) { in ppc_ecb_encrypt()
    194  ubytes = nbytes > MAX_BYTES ? in ppc_ecb_encrypt()
    195  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_ecb_encrypt()
    196  nbytes -= ubytes; in ppc_ecb_encrypt()
    200  ctx->key_enc, ctx->rounds, nbytes); in ppc_ecb_encrypt()
    210  struct scatterlist *src, unsigned int nbytes) in ppc_ecb_decrypt() argument
    218  blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_decrypt()
    221  while ((nbytes = walk.nbytes)) { in ppc_ecb_decrypt()
    [all …]
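The SPE glue caps how much work each assembler call does, so a single walk step may be split: anything beyond MAX_BYTES, or the sub-block tail, is deferred to the next iteration. Below is a small model of that split; the MAX_BYTES value here is illustrative, the real limit is defined in aes-spe-glue.c.

    #include <stdio.h>

    #define AES_BLOCK_SIZE  16U
    #define MAX_BYTES       768U    /* illustrative per-call cap */

    /* Models the ppc_ecb_encrypt() split: at most MAX_BYTES go to the
     * assembler core per call, and whatever is held back (the overflow
     * beyond MAX_BYTES or the sub-block tail) waits for the next round
     * of the walk. */
    static void toy_spe_step(unsigned int nbytes)
    {
        unsigned int ubytes = nbytes > MAX_BYTES ?
                              nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);
        unsigned int processed = nbytes - ubytes;

        printf("walk step %u -> encrypt %u now, %u held back\n",
               nbytes, processed, ubytes);
    }

    int main(void)
    {
        toy_spe_step(2000);     /* 768 processed, 1232 held back */
        toy_spe_step(500);      /* 496 processed, 4 held back */
        return 0;
    }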
/Linux-v5.4/crypto/
D | pcbc.c |
    24   unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_segment() local
    36   } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_segment()
    38   return nbytes; in crypto_pcbc_encrypt_segment()
    46   unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_inplace() local
    58   } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_inplace()
    60   return nbytes; in crypto_pcbc_encrypt_inplace()
    68   unsigned int nbytes; in crypto_pcbc_encrypt() local
    73   while ((nbytes = walk.nbytes)) { in crypto_pcbc_encrypt()
    75   nbytes = crypto_pcbc_encrypt_inplace(req, &walk, in crypto_pcbc_encrypt()
    78   nbytes = crypto_pcbc_encrypt_segment(req, &walk, in crypto_pcbc_encrypt()
    [all …]
D | cfb.c |
    51   unsigned int nbytes = walk->nbytes; in crypto_cfb_final() local
    54   crypto_xor_cpy(dst, stream, src, nbytes); in crypto_cfb_final()
    61   unsigned int nbytes = walk->nbytes; in crypto_cfb_encrypt_segment() local
    73   } while ((nbytes -= bsize) >= bsize); in crypto_cfb_encrypt_segment()
    77   return nbytes; in crypto_cfb_encrypt_segment()
    84   unsigned int nbytes = walk->nbytes; in crypto_cfb_encrypt_inplace() local
    95   } while ((nbytes -= bsize) >= bsize); in crypto_cfb_encrypt_inplace()
    99   return nbytes; in crypto_cfb_encrypt_inplace()
    111  while (walk.nbytes >= bsize) { in crypto_cfb_encrypt()
    119  if (walk.nbytes) { in crypto_cfb_encrypt()
    [all …]
D | scatterwalk.c |
    18   static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out) in memcpy_dir() argument
    23   memcpy(dst, src, nbytes); in memcpy_dir()
    27   size_t nbytes, int out) in scatterwalk_copychunks() argument
    33   if (len_this_page > nbytes) in scatterwalk_copychunks()
    34   len_this_page = nbytes; in scatterwalk_copychunks()
    44   if (nbytes == len_this_page) in scatterwalk_copychunks()
    48   nbytes -= len_this_page; in scatterwalk_copychunks()
    56   unsigned int start, unsigned int nbytes, int out) in scatterwalk_map_and_copy() argument
    61   if (!nbytes) in scatterwalk_map_and_copy()
    67   scatterwalk_copychunks(buf, &walk, nbytes, out); in scatterwalk_map_and_copy()
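scatterwalk_copychunks() clamps every copy to the bytes remaining on the current scatterlist page before advancing. The standalone sketch below mimics that clamping over flat memory with a made-up page size; toy_copychunks() and TOY_PAGE_SIZE are invented names, not the kernel interface.

    #include <stdio.h>
    #include <string.h>

    #define TOY_PAGE_SIZE 64U       /* stand-in for PAGE_SIZE */

    /* Copy nbytes starting at offset, never letting a single memcpy cross
     * a "page" boundary, the way the scatterlist walker never crosses an
     * sg entry/page in one step. */
    static void toy_copychunks(void *buf, const void *src, size_t offset,
                               size_t nbytes)
    {
        unsigned char *dst = buf;
        const unsigned char *s = src;

        while (nbytes) {
            size_t in_page = TOY_PAGE_SIZE - (offset % TOY_PAGE_SIZE);
            size_t len_this_page = in_page < nbytes ? in_page : nbytes;

            memcpy(dst, s + offset, len_this_page);

            dst += len_this_page;
            offset += len_this_page;
            nbytes -= len_this_page;
        }
    }

    int main(void)
    {
        unsigned char src[256], dst[100] = { 0 };

        for (size_t i = 0; i < sizeof(src); i++)
            src[i] = (unsigned char)i;

        /* 100 bytes starting at offset 30 span three 64-byte "pages". */
        toy_copychunks(dst, src, 30, sizeof(dst));
        printf("dst[0]=%d dst[99]=%d\n", dst[0], dst[99]);  /* 30 and 129 */
        return 0;
    }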
/Linux-v5.4/lib/mpi/
D | mpicoder.c |
    35   MPI mpi_read_raw_data(const void *xbuffer, size_t nbytes) in mpi_read_raw_data() argument
    43   while (nbytes > 0 && buffer[0] == 0) { in mpi_read_raw_data()
    45   nbytes--; in mpi_read_raw_data()
    48   nbits = nbytes * 8; in mpi_read_raw_data()
    53   if (nbytes > 0) in mpi_read_raw_data()
    56   nlimbs = DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB); in mpi_read_raw_data()
    64   if (nbytes > 0) { in mpi_read_raw_data()
    65   i = BYTES_PER_MPI_LIMB - nbytes % BYTES_PER_MPI_LIMB; in mpi_read_raw_data()
    84   unsigned int nbits, nbytes; in mpi_read_from_buffer() local
    96   nbytes = DIV_ROUND_UP(nbits, 8); in mpi_read_from_buffer()
    [all …]
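mpi_read_raw_data() first drops leading zero bytes, then derives the bit count and the number of limbs, padding the most significant limb when nbytes is not limb aligned. A user-space sketch of just that size arithmetic, with the limb size taken as sizeof(unsigned long) for illustration (it omits the exact leading-bit count the kernel also performs):

    #include <stdio.h>
    #include <stddef.h>

    #define BYTES_PER_LIMB      sizeof(unsigned long)
    #define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))

    /* Skip leading zero bytes, then work out how many limbs the number
     * needs and how many pad bytes the most significant limb carries. */
    static void toy_mpi_sizes(const unsigned char *buf, size_t nbytes)
    {
        while (nbytes > 0 && buf[0] == 0) {
            buf++;
            nbytes--;
        }

        size_t nbits = nbytes * 8;
        size_t nlimbs = DIV_ROUND_UP(nbytes, BYTES_PER_LIMB);
        size_t pad = nbytes ? BYTES_PER_LIMB - nbytes % BYTES_PER_LIMB : 0;

        if (pad == BYTES_PER_LIMB)
            pad = 0;        /* nbytes was already limb aligned */

        printf("nbytes=%zu nbits=%zu nlimbs=%zu top-limb pad=%zu\n",
               nbytes, nbits, nlimbs, pad);
    }

    int main(void)
    {
        /* two leading zero bytes, five significant bytes */
        const unsigned char n[] = { 0x00, 0x00, 0x01, 0x23, 0x45, 0x67, 0x89 };

        toy_mpi_sizes(n, sizeof(n));
        return 0;
    }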
/Linux-v5.4/arch/sparc/crypto/
D | des_glue.c |
    92   unsigned int nbytes, bool encrypt) in __ecb_crypt() argument
    98   blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
    106  while ((nbytes = walk.nbytes)) { in __ecb_crypt()
    107  unsigned int block_len = nbytes & DES_BLOCK_MASK; in __ecb_crypt()
    114  nbytes &= DES_BLOCK_SIZE - 1; in __ecb_crypt()
    115  err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
    123  unsigned int nbytes) in ecb_encrypt() argument
    125  return __ecb_crypt(desc, dst, src, nbytes, true); in ecb_encrypt()
    130  unsigned int nbytes) in ecb_decrypt() argument
    132  return __ecb_crypt(desc, dst, src, nbytes, false); in ecb_decrypt()
    [all …]
D | camellia_glue.c |
    88   unsigned int nbytes, bool encrypt) in __ecb_crypt() argument
    100  blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
    109  while ((nbytes = walk.nbytes)) { in __ecb_crypt()
    110  unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK; in __ecb_crypt()
    120  nbytes &= CAMELLIA_BLOCK_SIZE - 1; in __ecb_crypt()
    121  err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
    129  unsigned int nbytes) in ecb_encrypt() argument
    131  return __ecb_crypt(desc, dst, src, nbytes, true); in ecb_encrypt()
    136  unsigned int nbytes) in ecb_decrypt() argument
    138  return __ecb_crypt(desc, dst, src, nbytes, false); in ecb_decrypt()
    [all …]
D | aes_glue.c |
    218  unsigned int nbytes) in ecb_encrypt() argument
    224  blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
    229  while ((nbytes = walk.nbytes)) { in ecb_encrypt()
    230  unsigned int block_len = nbytes & AES_BLOCK_MASK; in ecb_encrypt()
    238  nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
    239  err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
    247  unsigned int nbytes) in ecb_decrypt() argument
    254  blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
    260  while ((nbytes = walk.nbytes)) { in ecb_decrypt()
    261  unsigned int block_len = nbytes & AES_BLOCK_MASK; in ecb_decrypt()
    [all …]
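All three sparc glue files split each walk step the same way: a mask keeps the block-aligned part for the hardware routine, and the sub-block remainder goes back to blkcipher_walk_done() (the s390 entries further down round down in the same spirit). A minimal sketch of that arithmetic, with constants mirroring the AES case:

    #include <stdio.h>

    #define AES_BLOCK_SIZE  16U
    #define AES_BLOCK_MASK  (~(AES_BLOCK_SIZE - 1))

    /* block_len goes to the hardware routine; leftover is reported back
     * so the walk can carry it into the next step. */
    static void toy_split(unsigned int nbytes)
    {
        unsigned int block_len = nbytes & AES_BLOCK_MASK;
        unsigned int leftover = nbytes & (AES_BLOCK_SIZE - 1);

        printf("nbytes=%u -> process %u, hand back %u\n",
               nbytes, block_len, leftover);
    }

    int main(void)
    {
        toy_split(4096);    /* fully aligned: 4096 processed, 0 left */
        toy_split(1000);    /* 992 processed, 8 left over */
        return 0;
    }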
/Linux-v5.4/include/crypto/
D | cbc.h |
    20   unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_segment() local
    32   } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_segment()
    34   return nbytes; in crypto_cbc_encrypt_segment()
    42   unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_inplace() local
    52   } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_inplace()
    56   return nbytes; in crypto_cbc_encrypt_inplace()
    69   while (walk.nbytes) { in crypto_cbc_encrypt_walk()
    85   unsigned int nbytes = walk->nbytes; in crypto_cbc_decrypt_segment() local
    97   } while ((nbytes -= bsize) >= bsize); in crypto_cbc_decrypt_segment()
    101  return nbytes; in crypto_cbc_decrypt_segment()
    [all …]
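crypto_cbc_encrypt_segment() XORs the running IV into each plaintext block, encrypts it, and lets the ciphertext become the next IV, returning the sub-block tail with the same "(nbytes -= bsize) >= bsize" idiom the pcbc/cfb helpers above use. The sketch reproduces the loop and the nbytes bookkeeping with a throwaway one-byte-add stand-in for the cipher; it is not real CBC and not the kernel API.

    #include <stdio.h>
    #include <string.h>

    #define BSIZE 8U

    static void toy_xor(unsigned char *dst, const unsigned char *src, unsigned int n)
    {
        for (unsigned int i = 0; i < n; i++)
            dst[i] ^= src[i];
    }

    /* Stand-in for the real block cipher call. */
    static void toy_encrypt_block(unsigned char *dst, const unsigned char *src)
    {
        for (unsigned int i = 0; i < BSIZE; i++)
            dst[i] = (unsigned char)(src[i] + 1);
    }

    /* XOR the running IV into each plaintext block, encrypt it, chain the
     * ciphertext as the next IV, and return the sub-block tail. */
    static unsigned int toy_cbc_encrypt_segment(unsigned char *dst,
                                                const unsigned char *src,
                                                unsigned int nbytes,
                                                unsigned char *iv)
    {
        do {
            unsigned char buf[BSIZE];

            memcpy(buf, src, BSIZE);
            toy_xor(buf, iv, BSIZE);
            toy_encrypt_block(dst, buf);
            iv = dst;               /* chain: ciphertext becomes the next IV */

            src += BSIZE;
            dst += BSIZE;
        } while ((nbytes -= BSIZE) >= BSIZE);

        return nbytes;
    }

    int main(void)
    {
        unsigned char pt[40] = { 0 }, ct[40], iv[BSIZE] = { 0 };

        printf("tail = %u\n", toy_cbc_encrypt_segment(ct, pt, sizeof(pt), iv));
        return 0;
    }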
D | ctr.h |
    36   while (walk.nbytes > 0) { in crypto_ctr_encrypt_walk()
    39   int nbytes = walk.nbytes; in crypto_ctr_encrypt_walk() local
    42   if (nbytes < walk.total) { in crypto_ctr_encrypt_walk()
    43   tail = walk.nbytes & (blocksize - 1); in crypto_ctr_encrypt_walk()
    44   nbytes -= tail; in crypto_ctr_encrypt_walk()
    48   int bsize = min(nbytes, blocksize); in crypto_ctr_encrypt_walk()
    57   nbytes -= bsize; in crypto_ctr_encrypt_walk()
    58   } while (nbytes > 0); in crypto_ctr_encrypt_walk()
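crypto_ctr_encrypt_walk() only lets a partial block through on the final walk step; earlier steps hold the tail bytes back so the keystream stays block aligned. A sketch of that decision, with invented names:

    #include <stdio.h>

    #define BLOCKSIZE 16U

    /* A partial block may only be consumed on the final walk step, so
     * earlier steps defer those bytes to the next iteration. */
    static unsigned int toy_ctr_step(unsigned int step_bytes,
                                     unsigned int total_remaining)
    {
        unsigned int nbytes = step_bytes;
        unsigned int tail = 0;

        if (nbytes < total_remaining) {     /* not the last step */
            tail = nbytes & (BLOCKSIZE - 1);
            nbytes -= tail;
        }

        /* nbytes would now be XOR-ed with keystream, BLOCKSIZE at a time,
         * with a short final chunk allowed only on the last step. */
        printf("step=%u total=%u -> process %u, defer %u\n",
               step_bytes, total_remaining, nbytes, tail);
        return tail;
    }

    int main(void)
    {
        toy_ctr_step(100, 500);     /* middle step: 96 processed, 4 deferred */
        toy_ctr_step(100, 100);     /* final step: all 100 processed */
        return 0;
    }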
/Linux-v5.4/arch/s390/crypto/
D | des_s390.c |
    86   unsigned int nbytes, n; in ecb_desall_crypt() local
    90   while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { in ecb_desall_crypt()
    92   n = nbytes & ~(DES_BLOCK_SIZE - 1); in ecb_desall_crypt()
    95   ret = blkcipher_walk_done(desc, walk, nbytes - n); in ecb_desall_crypt()
    104  unsigned int nbytes, n; in cbc_desall_crypt() local
    114  while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { in cbc_desall_crypt()
    116  n = nbytes & ~(DES_BLOCK_SIZE - 1); in cbc_desall_crypt()
    119  ret = blkcipher_walk_done(desc, walk, nbytes - n); in cbc_desall_crypt()
    127  unsigned int nbytes) in ecb_des_encrypt() argument
    131  blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_des_encrypt()
    [all …]
D | aes_s390.c |
    68   unsigned int nbytes; member
    203  unsigned int nbytes) in fallback_blk_dec() argument
    212  skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); in fallback_blk_dec()
    222  unsigned int nbytes) in fallback_blk_enc() argument
    231  skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); in fallback_blk_enc()
    262  unsigned int nbytes, n; in ecb_aes_crypt() local
    266  while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ecb_aes_crypt()
    268  n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_aes_crypt()
    271  ret = blkcipher_walk_done(desc, walk, nbytes - n); in ecb_aes_crypt()
    279  unsigned int nbytes) in ecb_aes_encrypt() argument
    [all …]
D | paes_s390.c |
    165  unsigned int nbytes, n, k; in ecb_paes_crypt() local
    169  while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ecb_paes_crypt()
    171  n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_paes_crypt()
    175  ret = blkcipher_walk_done(desc, walk, nbytes - k); in ecb_paes_crypt()
    186  unsigned int nbytes) in ecb_paes_encrypt() argument
    190  blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_paes_encrypt()
    196  unsigned int nbytes) in ecb_paes_decrypt() argument
    200  blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_paes_decrypt()
    283  unsigned int nbytes, n, k; in cbc_paes_crypt() local
    293  while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in cbc_paes_crypt()
    [all …]
/Linux-v5.4/drivers/staging/comedi/
D | comedi_buf.c |
    341  unsigned int nbytes) in comedi_buf_write_alloc() argument
    346  if (nbytes > unalloc) in comedi_buf_write_alloc()
    347  nbytes = unalloc; in comedi_buf_write_alloc()
    349  async->buf_write_alloc_count += nbytes; in comedi_buf_write_alloc()
    357  return nbytes; in comedi_buf_write_alloc()
    431  unsigned int nbytes) in comedi_buf_write_free() argument
    436  if (nbytes > allocated) in comedi_buf_write_free()
    437  nbytes = allocated; in comedi_buf_write_free()
    439  async->buf_write_count += nbytes; in comedi_buf_write_free()
    440  async->buf_write_ptr += nbytes; in comedi_buf_write_free()
    [all …]
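The comedi buffer helpers clamp every request to what the ring can actually provide and then advance monotonic counters. The model below keeps the field names from the excerpts but invents the struct and simplifies the free-space calculation, so treat it as a sketch of the clamping idea rather than the driver's exact accounting.

    #include <stdio.h>

    struct toy_async {
        unsigned int prealloc_bufsz;        /* total ring size */
        unsigned int buf_write_alloc_count; /* bytes reserved so far */
        unsigned int buf_write_count;       /* bytes committed so far */
        unsigned int buf_write_ptr;         /* committed offset in ring */
    };

    /* Reserve up to nbytes of write space; the request is clamped to
     * whatever is still unallocated. */
    static unsigned int toy_buf_write_alloc(struct toy_async *a, unsigned int nbytes)
    {
        unsigned int unalloc = a->prealloc_bufsz -
                               (a->buf_write_alloc_count - a->buf_write_count);

        if (nbytes > unalloc)
            nbytes = unalloc;               /* clamp to free space */
        a->buf_write_alloc_count += nbytes;
        return nbytes;                      /* how much was really reserved */
    }

    /* Commit previously reserved bytes; cannot exceed what was reserved. */
    static unsigned int toy_buf_write_free(struct toy_async *a, unsigned int nbytes)
    {
        unsigned int allocated = a->buf_write_alloc_count - a->buf_write_count;

        if (nbytes > allocated)
            nbytes = allocated;
        a->buf_write_count += nbytes;
        a->buf_write_ptr = (a->buf_write_ptr + nbytes) % a->prealloc_bufsz;
        return nbytes;
    }

    int main(void)
    {
        struct toy_async a = { .prealloc_bufsz = 4096 };

        printf("reserved %u\n", toy_buf_write_alloc(&a, 8192)); /* clamped to 4096 */
        printf("committed %u\n", toy_buf_write_free(&a, 1000));
        return 0;
    }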
/Linux-v5.4/drivers/crypto/
D | geode-aes.c |
    182  unsigned int nbytes) in fallback_blk_dec() argument
    191  ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes); in fallback_blk_dec()
    198  unsigned int nbytes) in fallback_blk_enc() argument
    207  ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes); in fallback_blk_enc()
    304  unsigned int nbytes) in geode_cbc_decrypt() argument
    311  return fallback_blk_dec(desc, dst, src, nbytes); in geode_cbc_decrypt()
    313  blkcipher_walk_init(&walk, dst, src, nbytes); in geode_cbc_decrypt()
    317  while ((nbytes = walk.nbytes)) { in geode_cbc_decrypt()
    321  op->len = nbytes - (nbytes % AES_BLOCK_SIZE); in geode_cbc_decrypt()
    326  nbytes -= ret; in geode_cbc_decrypt()
    [all …]
/Linux-v5.4/drivers/infiniband/hw/hfi1/
D | pio_copy.c |
    226  unsigned int nbytes) in read_low_bytes() argument
    229  jcopy(&pbuf->carry.val8[0], from, nbytes); in read_low_bytes()
    230  pbuf->carry_bytes = nbytes; in read_low_bytes()
    242  const void *from, unsigned int nbytes) in read_extra_bytes() argument
    244  jcopy(&pbuf->carry.val8[pbuf->carry_bytes], from, nbytes); in read_extra_bytes()
    245  pbuf->carry_bytes += nbytes; in read_extra_bytes()
    305  const void *from, size_t nbytes) in seg_pio_copy_start() argument
    315  dend = dest + ((nbytes >> 3) * sizeof(u64)); in seg_pio_copy_start()
    380  read_low_bytes(pbuf, from, nbytes & 0x7); in seg_pio_copy_start()
    382  pbuf->qw_written = 1 /*PBC*/ + (nbytes >> 3); in seg_pio_copy_start()
    [all …]
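seg_pio_copy_start() pushes the payload to the device eight bytes at a time and parks the 0-7 trailing bytes in a carry buffer via read_low_bytes(), recording how many quadwords (including the PBC word) have been written. A user-space model of that split; the struct and names are illustrative only.

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>

    struct toy_pbuf {
        uint8_t carry[8];           /* bytes that did not fill a quadword */
        unsigned int carry_bytes;
        unsigned int qw_written;
    };

    static void toy_pio_copy_start(struct toy_pbuf *pbuf, const void *from,
                                   size_t nbytes)
    {
        size_t qwords = nbytes >> 3;    /* full 8-byte units */
        const uint8_t *tail = (const uint8_t *)from + (qwords << 3);

        /* ... qwords * 8 bytes would be written to the send buffer here ... */

        memcpy(pbuf->carry, tail, nbytes & 0x7);    /* like read_low_bytes() */
        pbuf->carry_bytes = nbytes & 0x7;
        pbuf->qw_written = 1 /* PBC */ + (unsigned int)qwords;
    }

    int main(void)
    {
        struct toy_pbuf pbuf = { 0 };
        uint8_t payload[27] = { 0 };

        toy_pio_copy_start(&pbuf, payload, sizeof(payload));
        printf("qw_written=%u carry_bytes=%u\n",
               pbuf.qw_written, pbuf.carry_bytes);  /* 4 and 3 */
        return 0;
    }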
/Linux-v5.4/arch/x86/lib/
D | insn.c |
    103  prefixes->nbytes++; in insn_get_prefixes()
    127  insn->rex_prefix.nbytes = 1; in insn_get_prefixes()
    157  insn->vex_prefix.nbytes = 4; in insn_get_prefixes()
    165  insn->vex_prefix.nbytes = 3; in insn_get_prefixes()
    177  insn->vex_prefix.nbytes = 2; in insn_get_prefixes()
    213  opcode->nbytes = 1; in insn_get_opcode()
    232  opcode->bytes[opcode->nbytes++] = op; in insn_get_opcode()
    265  modrm->nbytes = 1; in insn_get_modrm()
    303  return (modrm->nbytes && (modrm->value & 0xc7) == 0x5); in insn_rip_relative()
    321  if (insn->modrm.nbytes) { in insn_get_sib()
    [all …]
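In the instruction decoder, every decoded field carries an nbytes count: how many bytes that field occupied, with zero meaning the field is absent, which is exactly what insn_rip_relative() and insn_get_sib() test. The toy fragment below counts legacy prefixes the same way; the prefix table is deliberately tiny and the struct is not the kernel's (the tools/ copy in the next entry carries the same hits).

    #include <stdio.h>
    #include <stdint.h>

    struct toy_field {
        uint8_t bytes[4];
        int nbytes;         /* 0 means "field not present" */
    };

    static int is_legacy_prefix(uint8_t b)
    {
        return b == 0x66 || b == 0x67 || b == 0xF0 || b == 0xF2 || b == 0xF3;
    }

    /* Collect leading legacy prefixes and record how many bytes they took. */
    static void toy_get_prefixes(struct toy_field *prefixes, const uint8_t *code,
                                 size_t len)
    {
        prefixes->nbytes = 0;
        for (size_t i = 0; i < len && i < sizeof(prefixes->bytes); i++) {
            if (!is_legacy_prefix(code[i]))
                break;
            prefixes->bytes[prefixes->nbytes++] = code[i];
        }
    }

    int main(void)
    {
        /* F3 0F 58 C1: one legacy/mandatory prefix, then a two-byte opcode
         * and a ModRM byte. */
        const uint8_t code[] = { 0xF3, 0x0F, 0x58, 0xC1 };
        struct toy_field prefixes;

        toy_get_prefixes(&prefixes, code, sizeof(code));
        printf("prefix nbytes = %d\n", prefixes.nbytes);    /* 1 */
        return 0;
    }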
/Linux-v5.4/tools/arch/x86/lib/
D | insn.c |
    103  prefixes->nbytes++; in insn_get_prefixes()
    127  insn->rex_prefix.nbytes = 1; in insn_get_prefixes()
    157  insn->vex_prefix.nbytes = 4; in insn_get_prefixes()
    165  insn->vex_prefix.nbytes = 3; in insn_get_prefixes()
    177  insn->vex_prefix.nbytes = 2; in insn_get_prefixes()
    213  opcode->nbytes = 1; in insn_get_opcode()
    232  opcode->bytes[opcode->nbytes++] = op; in insn_get_opcode()
    265  modrm->nbytes = 1; in insn_get_modrm()
    303  return (modrm->nbytes && (modrm->value & 0xc7) == 0x5); in insn_rip_relative()
    321  if (insn->modrm.nbytes) { in insn_get_sib()
    [all …]
/Linux-v5.4/include/trace/events/
D | random.h |
    194  TP_PROTO(int nbytes, unsigned long IP),
    196  TP_ARGS(nbytes, IP),
    199  __field( int, nbytes )
    204  __entry->nbytes = nbytes;
    208  TP_printk("nbytes %d caller %pS", __entry->nbytes, (void *)__entry->IP)
    212  TP_PROTO(int nbytes, unsigned long IP),
    214  TP_ARGS(nbytes, IP)
    218  TP_PROTO(int nbytes, unsigned long IP),
    220  TP_ARGS(nbytes, IP)
    224  TP_PROTO(const char *pool_name, int nbytes, int entropy_count,
    [all …]
/Linux-v5.4/drivers/pci/hotplug/
D | rpadlpar_sysfs.c |
    28   const char *buf, size_t nbytes) in add_slot_store() argument
    34   if (nbytes >= MAX_DRC_NAME_LEN) in add_slot_store()
    37   memcpy(drc_name, buf, nbytes); in add_slot_store()
    41   end = &drc_name[nbytes]; in add_slot_store()
    48   return nbytes; in add_slot_store()
    59   const char *buf, size_t nbytes) in remove_slot_store() argument
    65   if (nbytes >= MAX_DRC_NAME_LEN) in remove_slot_store()
    68   memcpy(drc_name, buf, nbytes); in remove_slot_store()
    72   end = &drc_name[nbytes]; in remove_slot_store()
    79   return nbytes; in remove_slot_store()
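Both sysfs store handlers bound-check nbytes against MAX_DRC_NAME_LEN, copy the raw buffer, strip the trailing newline that echo appends, and return nbytes so the write is seen as fully consumed. A standalone sketch of that pattern; the real handlers then call into the DLPAR add/remove paths, which is omitted here, and the error convention is simplified.

    #include <stdio.h>
    #include <string.h>

    #define MAX_DRC_NAME_LEN 64

    static long toy_slot_store(const char *buf, size_t nbytes)
    {
        char drc_name[MAX_DRC_NAME_LEN];
        char *end;

        if (nbytes >= MAX_DRC_NAME_LEN)
            return -1;                      /* name too long */

        memcpy(drc_name, buf, nbytes);
        drc_name[nbytes] = '\0';

        end = &drc_name[nbytes];
        if (end > drc_name && *(end - 1) == '\n')
            *(end - 1) = '\0';              /* echo appends a newline */

        printf("slot name: '%s'\n", drc_name);
        return (long)nbytes;                /* tell sysfs we used it all */
    }

    int main(void)
    {
        const char input[] = "PHB 4\n";

        return toy_slot_store(input, sizeof(input) - 1) < 0;
    }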
/Linux-v5.4/drivers/spi/
D | spi-mem.c |
    40   if (!op->data.nbytes) in spi_controller_dma_map_mem_op_data()
    53   return spi_map_buf(ctlr, dmadev, sgt, op->data.buf.in, op->data.nbytes, in spi_controller_dma_map_mem_op_data()
    86   if (!op->data.nbytes) in spi_controller_dma_unmap_mem_op_data()
    144  if (op->addr.nbytes && in spi_mem_default_supports_op()
    148  if (op->dummy.nbytes && in spi_mem_default_supports_op()
    174  if ((op->addr.nbytes && !op->addr.buswidth) || in spi_mem_check_op()
    175  (op->dummy.nbytes && !op->dummy.buswidth) || in spi_mem_check_op()
    176  (op->data.nbytes && !op->data.buswidth)) in spi_mem_check_op()
    307  tmpbufsize = sizeof(op->cmd.opcode) + op->addr.nbytes + in spi_mem_exec_op()
    308  op->dummy.nbytes; in spi_mem_exec_op()
    [all …]
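spi-mem treats a phase's nbytes as the signal that the phase exists: any phase that carries bytes must also declare a bus width, and the command bounce buffer is sized from the cmd, addr and dummy phases. The sketch models those two checks with a trimmed-down stand-in for struct spi_mem_op; the field layout is a simplification, not the real header.

    #include <stdio.h>
    #include <stddef.h>

    struct toy_mem_op {
        struct { unsigned char opcode; unsigned char buswidth; } cmd;
        struct { unsigned char nbytes; unsigned char buswidth; } addr;
        struct { unsigned char nbytes; unsigned char buswidth; } dummy;
        struct { unsigned int nbytes;  unsigned char buswidth; } data;
    };

    /* Every phase with bytes must have a bus width, as in spi_mem_check_op(). */
    static int toy_mem_check_op(const struct toy_mem_op *op)
    {
        if ((op->addr.nbytes && !op->addr.buswidth) ||
            (op->dummy.nbytes && !op->dummy.buswidth) ||
            (op->data.nbytes && !op->data.buswidth))
            return -1;      /* -EINVAL in the kernel */
        return 0;
    }

    /* Opcode byte + address bytes + dummy bytes, as in spi_mem_exec_op(). */
    static size_t toy_cmd_buf_size(const struct toy_mem_op *op)
    {
        return sizeof(op->cmd.opcode) + op->addr.nbytes + op->dummy.nbytes;
    }

    int main(void)
    {
        struct toy_mem_op read_op = {
            .cmd   = { .opcode = 0x0B, .buswidth = 1 },     /* fast read */
            .addr  = { .nbytes = 3, .buswidth = 1 },
            .dummy = { .nbytes = 1, .buswidth = 1 },
            .data  = { .nbytes = 256, .buswidth = 1 },
        };

        printf("valid=%d cmd buf=%zu bytes\n",
               toy_mem_check_op(&read_op), toy_cmd_buf_size(&read_op));
        return 0;
    }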