/Linux-v5.4/fs/ext4/indirect.c
    245  Indirect *partial)  in ext4_find_goal() argument
    253  goal = ext4_find_near(inode, partial);  in ext4_find_goal()
    517  Indirect *partial;  in ext4_ind_map_blocks() local
    533  partial = ext4_get_branch(inode, depth, offsets, chain, &err);  in ext4_ind_map_blocks()
    536  if (!partial) {  in ext4_ind_map_blocks()
    565  for (i = partial - chain + 1; i < depth; i++)  in ext4_ind_map_blocks()
    598  ar.goal = ext4_find_goal(inode, map->m_lblk, partial);  in ext4_ind_map_blocks()
    601  indirect_blks = (chain + depth) - partial - 1;  in ext4_ind_map_blocks()
    607  ar.len = ext4_blks_to_allocate(partial, indirect_blks,  in ext4_ind_map_blocks()
    614  offsets + (partial - chain), partial);  in ext4_ind_map_blocks()
    [all …]
/Linux-v5.4/fs/minix/itree_common.c
    152  Indirect *partial;  in get_block() local
    160  partial = get_branch(inode, depth, offsets, chain, &err);  in get_block()
    163  if (!partial) {  in get_block()
    167  partial = chain+depth-1; /* the whole chain */  in get_block()
    174  while (partial > chain) {  in get_block()
    175  brelse(partial->bh);  in get_block()
    176  partial--;  in get_block()
    190  left = (chain + depth) - partial;  in get_block()
    191  err = alloc_branch(inode, left, offsets+(partial-chain), partial);  in get_block()
    195  if (splice_branch(inode, chain, partial, left) < 0)  in get_block()
    [all …]
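Note on the pattern above: in the minix get_block() excerpt (and the ext4 and sysv variants elsewhere in these results), `partial` is a cursor into a fixed-size array of `Indirect` entries. get_branch() returns NULL when every level of the chain is already mapped, or a pointer to the first unmapped level otherwise, and `(chain + depth) - partial` is then the number of levels still to allocate. The standalone sketch below restates just that pointer arithmetic with a toy `Indirect` type and a hypothetical toy_get_branch() helper; the names are invented for illustration and are not kernel code.

#include <stdio.h>
#include <stddef.h>

/* Toy stand-ins for the kernel types; purely illustrative. */
struct buffer_head { int dummy; };
typedef struct {
    unsigned int key;           /* block number recorded at this level */
    struct buffer_head *bh;     /* buffer holding the indirect block   */
} Indirect;

#define MAX_DEPTH 4

/*
 * Hypothetical lookup: return NULL if all `depth` levels are mapped,
 * or a pointer to the first level whose key is 0 (the first hole that
 * still needs a block allocated).
 */
static Indirect *toy_get_branch(Indirect chain[], int depth)
{
    for (int i = 0; i < depth; i++)
        if (chain[i].key == 0)
            return &chain[i];
    return NULL;
}

int main(void)
{
    Indirect chain[MAX_DEPTH] = {
        { .key = 12 }, { .key = 99 }, { .key = 0 }, { .key = 0 },
    };
    int depth = MAX_DEPTH;
    Indirect *partial = toy_get_branch(chain, depth);

    if (!partial) {
        puts("fully mapped: nothing left to allocate");
        return 0;
    }

    /* Same arithmetic as the excerpts: levels that still need allocation. */
    ptrdiff_t left = (chain + depth) - partial;
    printf("need to allocate %td level(s), starting at depth %td\n",
           left, partial - chain);
    return 0;
}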
/Linux-v5.4/include/crypto/sha256_base.h
    41  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_base_do_update() local
    45  if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {  in sha256_base_do_update()
    48  if (partial) {  in sha256_base_do_update()
    49  int p = SHA256_BLOCK_SIZE - partial;  in sha256_base_do_update()
    51  memcpy(sctx->buf + partial, data, p);  in sha256_base_do_update()
    65  partial = 0;  in sha256_base_do_update()
    68  memcpy(sctx->buf + partial, data, len);  in sha256_base_do_update()
    79  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_base_do_finalize() local
    81  sctx->buf[partial++] = 0x80;  in sha256_base_do_finalize()
    82  if (partial > bit_offset) {  in sha256_base_do_finalize()
    [all …]

/Linux-v5.4/include/crypto/sha1_base.h
    40  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;  in sha1_base_do_update() local
    44  if (unlikely((partial + len) >= SHA1_BLOCK_SIZE)) {  in sha1_base_do_update()
    47  if (partial) {  in sha1_base_do_update()
    48  int p = SHA1_BLOCK_SIZE - partial;  in sha1_base_do_update()
    50  memcpy(sctx->buffer + partial, data, p);  in sha1_base_do_update()
    64  partial = 0;  in sha1_base_do_update()
    67  memcpy(sctx->buffer + partial, data, len);  in sha1_base_do_update()
    78  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;  in sha1_base_do_finalize() local
    80  sctx->buffer[partial++] = 0x80;  in sha1_base_do_finalize()
    81  if (partial > bit_offset) {  in sha1_base_do_finalize()
    [all …]

/Linux-v5.4/include/crypto/sm3_base.h
    43  unsigned int partial = sctx->count % SM3_BLOCK_SIZE;  in sm3_base_do_update() local
    47  if (unlikely((partial + len) >= SM3_BLOCK_SIZE)) {  in sm3_base_do_update()
    50  if (partial) {  in sm3_base_do_update()
    51  int p = SM3_BLOCK_SIZE - partial;  in sm3_base_do_update()
    53  memcpy(sctx->buffer + partial, data, p);  in sm3_base_do_update()
    67  partial = 0;  in sm3_base_do_update()
    70  memcpy(sctx->buffer + partial, data, len);  in sm3_base_do_update()
    81  unsigned int partial = sctx->count % SM3_BLOCK_SIZE;  in sm3_base_do_finalize() local
    83  sctx->buffer[partial++] = 0x80;  in sm3_base_do_finalize()
    84  if (partial > bit_offset) {  in sm3_base_do_finalize()
    [all …]

/Linux-v5.4/include/crypto/sha512_base.h
    61  unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;  in sha512_base_do_update() local
    67  if (unlikely((partial + len) >= SHA512_BLOCK_SIZE)) {  in sha512_base_do_update()
    70  if (partial) {  in sha512_base_do_update()
    71  int p = SHA512_BLOCK_SIZE - partial;  in sha512_base_do_update()
    73  memcpy(sctx->buf + partial, data, p);  in sha512_base_do_update()
    87  partial = 0;  in sha512_base_do_update()
    90  memcpy(sctx->buf + partial, data, len);  in sha512_base_do_update()
    101  unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;  in sha512_base_do_finalize() local
    103  sctx->buf[partial++] = 0x80;  in sha512_base_do_finalize()
    104  if (partial > bit_offset) {  in sha512_base_do_finalize()
    [all …]
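All four base headers above use the same partial-block buffering scheme: `partial = count % BLOCK_SIZE` is the number of bytes already waiting in the context buffer; when incoming data completes a block, the buffered bytes are topped up and flushed, full blocks are hashed straight from the input, and any remaining tail is copied back into the buffer with `partial` reset to 0. The sketch below restates that flow outside the kernel; `toy_ctx`, `toy_update()` and `process_blocks()` are invented names standing in for the arch-specific block function, not the kernel API.

#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 64   /* e.g. the SHA-1/SHA-256 block size */

struct toy_ctx {
    unsigned long long count;       /* total bytes fed in so far            */
    unsigned char buf[BLOCK_SIZE];  /* holds an unprocessed partial block   */
    unsigned int blocks_done;       /* demo counter, not in the kernel code */
};

/* Stand-in for the arch-specific block function. */
static void process_blocks(struct toy_ctx *ctx, const unsigned char *data,
                           int blocks)
{
    (void)data;
    ctx->blocks_done += blocks;
}

static void toy_update(struct toy_ctx *ctx, const unsigned char *data,
                       unsigned int len)
{
    unsigned int partial = ctx->count % BLOCK_SIZE;

    ctx->count += len;

    if ((partial + len) >= BLOCK_SIZE) {
        int blocks;

        if (partial) {
            /* Top up the buffered partial block and flush it. */
            int p = BLOCK_SIZE - partial;

            memcpy(ctx->buf + partial, data, p);
            data += p;
            len -= p;
            process_blocks(ctx, ctx->buf, 1);
        }

        /* Hash as many full blocks as possible straight from the input. */
        blocks = len / BLOCK_SIZE;
        len %= BLOCK_SIZE;
        if (blocks) {
            process_blocks(ctx, data, blocks);
            data += blocks * BLOCK_SIZE;
        }
        partial = 0;
    }

    /* Stash the remaining tail until the next update or the finalize step. */
    if (len)
        memcpy(ctx->buf + partial, data, len);
}

int main(void)
{
    struct toy_ctx ctx = { 0 };
    unsigned char msg[150] = { 0 };

    toy_update(&ctx, msg, 10);   /* buffered only, no block yet          */
    toy_update(&ctx, msg, 150);  /* flushes 2 blocks, buffers 32 bytes   */
    printf("blocks processed: %u, buffered: %llu\n",
           ctx.blocks_done, ctx.count % BLOCK_SIZE);
    return 0;
}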
/Linux-v5.4/fs/sysv/itree.c
    209  Indirect *partial;  in get_block() local
    218  partial = get_branch(inode, depth, offsets, chain, &err);  in get_block()
    222  if (!partial) {  in get_block()
    227  partial = chain+depth-1; /* the whole chain */  in get_block()
    234  while (partial > chain) {  in get_block()
    235  brelse(partial->bh);  in get_block()
    236  partial--;  in get_block()
    250  left = (chain + depth) - partial;  in get_block()
    251  err = alloc_branch(inode, left, offsets+(partial-chain), partial);  in get_block()
    255  if (splice_branch(inode, chain, partial, left) < 0)  in get_block()
    [all …]
/Linux-v5.4/drivers/crypto/padlock-sha.c
    289  unsigned int partial, done;  in padlock_sha1_update_nano() local
    296  partial = sctx->count & 0x3f;  in padlock_sha1_update_nano()
    302  if ((partial + len) >= SHA1_BLOCK_SIZE) {  in padlock_sha1_update_nano()
    305  if (partial) {  in padlock_sha1_update_nano()
    306  done = -partial;  in padlock_sha1_update_nano()
    307  memcpy(sctx->buffer + partial, data,  in padlock_sha1_update_nano()
    326  partial = 0;  in padlock_sha1_update_nano()
    329  memcpy(sctx->buffer + partial, src, len - done);  in padlock_sha1_update_nano()
    337  unsigned int partial, padlen;  in padlock_sha1_final_nano() local
    344  partial = state->count & 0x3f;  in padlock_sha1_final_nano()
    [all …]
/Linux-v5.4/arch/arm64/crypto/sha3-ce-glue.c
    39  if ((sctx->partial + len) >= sctx->rsiz) {  in sha3_update()
    42  if (sctx->partial) {  in sha3_update()
    43  int p = sctx->rsiz - sctx->partial;  in sha3_update()
    45  memcpy(sctx->buf + sctx->partial, data, p);  in sha3_update()
    52  sctx->partial = 0;  in sha3_update()
    67  memcpy(sctx->buf + sctx->partial, data, len);  in sha3_update()
    68  sctx->partial += len;  in sha3_update()
    83  sctx->buf[sctx->partial++] = 0x06;  in sha3_final()
    84  memset(sctx->buf + sctx->partial, 0, sctx->rsiz - sctx->partial);  in sha3_final()

/Linux-v5.4/arch/arm64/crypto/ghash-ce-glue.c
    126  unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;  in __ghash_update() local
    130  if ((partial + len) >= GHASH_BLOCK_SIZE) {  in __ghash_update()
    134  if (partial) {  in __ghash_update()
    135  int p = GHASH_BLOCK_SIZE - partial;  in __ghash_update()
    137  memcpy(ctx->buf + partial, src, p);  in __ghash_update()
    149  partial ? ctx->buf : NULL,  in __ghash_update()
    154  partial = 0;  in __ghash_update()
    158  memcpy(ctx->buf + partial, src, len);  in __ghash_update()
    177  unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;  in ghash_final_p8() local
    179  if (partial) {  in ghash_final_p8()
    [all …]
/Linux-v5.4/arch/powerpc/crypto/sha1.c
    41  unsigned int partial, done;  in sha1_update() local
    44  partial = sctx->count & 0x3f;  in sha1_update()
    49  if ((partial + len) > 63) {  in sha1_update()
    52  if (partial) {  in sha1_update()
    53  done = -partial;  in sha1_update()
    54  memcpy(sctx->buffer + partial, data, done + 64);  in sha1_update()
    65  partial = 0;  in sha1_update()
    67  memcpy(sctx->buffer + partial, src, len - done);  in sha1_update()
/Linux-v5.4/arch/sparc/crypto/sha1_glue.c
    42  unsigned int len, unsigned int partial)  in __sha1_sparc64_update() argument
    47  if (partial) {  in __sha1_sparc64_update()
    48  done = SHA1_BLOCK_SIZE - partial;  in __sha1_sparc64_update()
    49  memcpy(sctx->buffer + partial, data, done);  in __sha1_sparc64_update()
    66  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;  in sha1_sparc64_update() local
    69  if (partial + len < SHA1_BLOCK_SIZE) {  in sha1_sparc64_update()
    71  memcpy(sctx->buffer + partial, data, len);  in sha1_sparc64_update()
    73  __sha1_sparc64_update(sctx, data, len, partial);  in sha1_sparc64_update()

/Linux-v5.4/arch/sparc/crypto/md5_glue.c
    47  unsigned int len, unsigned int partial)  in __md5_sparc64_update() argument
    52  if (partial) {  in __md5_sparc64_update()
    53  done = MD5_HMAC_BLOCK_SIZE - partial;  in __md5_sparc64_update()
    54  memcpy((u8 *)sctx->block + partial, data, done);  in __md5_sparc64_update()
    71  unsigned int partial = sctx->byte_count % MD5_HMAC_BLOCK_SIZE;  in md5_sparc64_update() local
    74  if (partial + len < MD5_HMAC_BLOCK_SIZE) {  in md5_sparc64_update()
    76  memcpy((u8 *)sctx->block + partial, data, len);  in md5_sparc64_update()
    78  __md5_sparc64_update(sctx, data, len, partial);  in md5_sparc64_update()

/Linux-v5.4/arch/sparc/crypto/sha512_glue.c
    62  unsigned int len, unsigned int partial)  in __sha512_sparc64_update() argument
    68  if (partial) {  in __sha512_sparc64_update()
    69  done = SHA512_BLOCK_SIZE - partial;  in __sha512_sparc64_update()
    70  memcpy(sctx->buf + partial, data, done);  in __sha512_sparc64_update()
    87  unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;  in sha512_sparc64_update() local
    90  if (partial + len < SHA512_BLOCK_SIZE) {  in sha512_sparc64_update()
    93  memcpy(sctx->buf + partial, data, len);  in sha512_sparc64_update()
    95  __sha512_sparc64_update(sctx, data, len, partial);  in sha512_sparc64_update()

/Linux-v5.4/arch/sparc/crypto/sha256_glue.c
    63  unsigned int len, unsigned int partial)  in __sha256_sparc64_update() argument
    68  if (partial) {  in __sha256_sparc64_update()
    69  done = SHA256_BLOCK_SIZE - partial;  in __sha256_sparc64_update()
    70  memcpy(sctx->buf + partial, data, done);  in __sha256_sparc64_update()
    87  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_sparc64_update() local
    90  if (partial + len < SHA256_BLOCK_SIZE) {  in sha256_sparc64_update()
    92  memcpy(sctx->buf + partial, data, len);  in sha256_sparc64_update()
    94  __sha256_sparc64_update(sctx, data, len, partial);  in sha256_sparc64_update()
/Linux-v5.4/fs/ext2/inode.c
    331  Indirect *partial)  in ext2_find_goal() argument
    346  return ext2_find_near(inode, partial);  in ext2_find_goal()
    631  Indirect *partial;  in ext2_get_blocks() local
    647  partial = ext2_get_branch(inode, depth, offsets, chain, &err);  in ext2_get_blocks()
    649  if (!partial) {  in ext2_get_blocks()
    665  partial = chain + depth - 1;  in ext2_get_blocks()
    695  if (err == -EAGAIN || !verify_chain(chain, partial)) {  in ext2_get_blocks()
    696  while (partial > chain) {  in ext2_get_blocks()
    697  brelse(partial->bh);  in ext2_get_blocks()
    698  partial--;  in ext2_get_blocks()
    [all …]
/Linux-v5.4/arch/mips/cavium-octeon/crypto/octeon-sha1.c
    91  unsigned int partial;  in __octeon_sha1_update() local
    95  partial = sctx->count % SHA1_BLOCK_SIZE;  in __octeon_sha1_update()
    100  if ((partial + len) >= SHA1_BLOCK_SIZE) {  in __octeon_sha1_update()
    101  if (partial) {  in __octeon_sha1_update()
    102  done = -partial;  in __octeon_sha1_update()
    103  memcpy(sctx->buffer + partial, data,  in __octeon_sha1_update()
    114  partial = 0;  in __octeon_sha1_update()
    116  memcpy(sctx->buffer + partial, src, len - done);  in __octeon_sha1_update()

/Linux-v5.4/arch/mips/cavium-octeon/crypto/octeon-sha256.c
    103  unsigned int partial;  in __octeon_sha256_update() local
    107  partial = sctx->count % SHA256_BLOCK_SIZE;  in __octeon_sha256_update()
    112  if ((partial + len) >= SHA256_BLOCK_SIZE) {  in __octeon_sha256_update()
    113  if (partial) {  in __octeon_sha256_update()
    114  done = -partial;  in __octeon_sha256_update()
    115  memcpy(sctx->buf + partial, data,  in __octeon_sha256_update()
    126  partial = 0;  in __octeon_sha256_update()
    128  memcpy(sctx->buf + partial, src, len - done);  in __octeon_sha256_update()
/Linux-v5.4/drivers/usb/storage/freecom.c
    227  unsigned int partial;  in freecom_transport() local
    266  FCM_STATUS_PACKET_LENGTH, &partial);  in freecom_transport()
    267  usb_stor_dbg(us, "foo Status result %d %u\n", result, partial);  in freecom_transport()
    271  US_DEBUG(pdump(us, (void *)fst, partial));  in freecom_transport()
    308  FCM_STATUS_PACKET_LENGTH, &partial);  in freecom_transport()
    310  usb_stor_dbg(us, "bar Status result %d %u\n", result, partial);  in freecom_transport()
    314  US_DEBUG(pdump(us, (void *)fst, partial));  in freecom_transport()
    317  if (partial != 4)  in freecom_transport()
    376  FCM_PACKET_LENGTH, &partial);  in freecom_transport()
    377  US_DEBUG(pdump(us, (void *)fst, partial));  in freecom_transport()
    [all …]

/Linux-v5.4/drivers/usb/storage/initializers.c
    56  unsigned int partial;  in usb_stor_ucr61s2b_init() local
    70  US_BULK_CB_WRAP_LEN, &partial);  in usb_stor_ucr61s2b_init()
    76  US_BULK_CS_WRAP_LEN, &partial);  in usb_stor_ucr61s2b_init()
/Linux-v5.4/arch/x86/include/asm/unwind.h
    68  bool *partial)  in unwind_get_entry_regs() argument
    73  if (partial) {  in unwind_get_entry_regs()
    75  *partial = !state->full_regs;  in unwind_get_entry_regs()
    77  *partial = false;  in unwind_get_entry_regs()
    85  bool *partial)  in unwind_get_entry_regs() argument
/Linux-v5.4/arch/arm/crypto/ghash-ce-glue.c
    102  unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;  in ghash_update() local
    106  if ((partial + len) >= GHASH_BLOCK_SIZE) {  in ghash_update()
    110  if (partial) {  in ghash_update()
    111  int p = GHASH_BLOCK_SIZE - partial;  in ghash_update()
    113  memcpy(ctx->buf + partial, src, p);  in ghash_update()
    122  partial ? ctx->buf : NULL);  in ghash_update()
    124  partial = 0;  in ghash_update()
    127  memcpy(ctx->buf + partial, src, len);  in ghash_update()
    134  unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;  in ghash_final() local
    136  if (partial) {  in ghash_final()
    [all …]
/Linux-v5.4/arch/x86/kernel/dumpstack.c
    137  bool partial)  in show_regs_if_on_stack() argument
    148  if (!partial && on_stack(info, regs, sizeof(*regs))) {  in show_regs_if_on_stack()
    151  } else if (partial && on_stack(info, (void *)regs + IRET_FRAME_OFFSET,  in show_regs_if_on_stack()
    169  bool partial = false;  in show_trace_log_lvl() local
    175  regs = unwind_get_entry_regs(&state, &partial);  in show_trace_log_lvl()
    213  show_regs_if_on_stack(&stack_info, regs, partial);  in show_trace_log_lvl()
    272  regs = unwind_get_entry_regs(&state, &partial);  in show_trace_log_lvl()
    274  show_regs_if_on_stack(&stack_info, regs, partial);  in show_trace_log_lvl()
/Linux-v5.4/crypto/sha3_generic.c
    168  sctx->partial = 0;  in crypto_sha3_init()
    185  if ((sctx->partial + len) > (sctx->rsiz - 1)) {  in crypto_sha3_update()
    186  if (sctx->partial) {  in crypto_sha3_update()
    187  done = -sctx->partial;  in crypto_sha3_update()
    188  memcpy(sctx->buf + sctx->partial, data,  in crypto_sha3_update()
    204  sctx->partial = 0;  in crypto_sha3_update()
    206  memcpy(sctx->buf + sctx->partial, src, len - done);  in crypto_sha3_update()
    207  sctx->partial += (len - done);  in crypto_sha3_update()
    216  unsigned int i, inlen = sctx->partial;  in crypto_sha3_final()
/Linux-v5.4/drivers/gpu/drm/amd/powerplay/hwmgr/ppevvmath.h
    46  } partial;  member
    338  X_LessThanOne = (X.partial.real == 0 && X.partial.decimal != 0 && X.full >= 0);  in fMultiply()
    339  Y_LessThanOne = (Y.partial.real == 0 && Y.partial.decimal != 0 && Y.full >= 0);  in fMultiply()
    413  if (num.partial.real > 3000)  in fSqrt()
    415  else if (num.partial.real > 1000)  in fSqrt()
    417  else if (num.partial.real > 100)  in fSqrt()
    512  int i, scaledDecimal = 0, tmp = A.partial.decimal;  in uGetScaledDecimal()
    545  solution.partial.decimal = 0; /*All fractional digits changes to 0 */  in fRoundUpByStepSize()
    548  solution.partial.real += 1; /*Error term of 1 added */  in fRoundUpByStepSize()
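In ppevvmath.h, `partial` is a member of a union that overlays one fixed-point value (`full`) with separate integer (`partial.real`) and fractional (`partial.decimal`) views, which is what makes tests like `X.partial.real == 0 && X.partial.decimal != 0` a cheap less-than-one check. Below is a hedged sketch of that idea; the 16.16 split, the little-endian field order and all type names are assumptions made for illustration, not the actual layout used by ppevvmath.h.

#include <stdio.h>
#include <stdint.h>

/*
 * Illustrative 16.16 fixed-point type: `full` is the raw value, while
 * `partial` exposes the fractional and integer halves separately.
 * Field order assumes a little-endian layout; this is a sketch only.
 */
typedef union {
    int32_t full;
    struct {
        uint16_t decimal;   /* low 16 bits: fractional part */
        int16_t  real;      /* high 16 bits: integer part   */
    } partial;
} fixed16_16;

static fixed16_16 from_double(double v)
{
    fixed16_16 f;

    f.full = (int32_t)(v * 65536.0);
    return f;
}

int main(void)
{
    fixed16_16 x = from_double(0.75);

    /* Same shape as the fMultiply() excerpt: a non-negative value is
     * strictly less than one iff its integer part is zero and its
     * fractional part is non-zero. */
    int less_than_one = (x.partial.real == 0 &&
                         x.partial.decimal != 0 && x.full >= 0);

    printf("0.75 -> real=%d decimal=%u less_than_one=%d\n",
           x.partial.real, (unsigned)x.partial.decimal, less_than_one);
    return 0;
}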