/Linux-v4.19/drivers/s390/cio/ |
D | airq.c |
  125 struct airq_iv *iv; in airq_iv_create() local
  128 iv = kzalloc(sizeof(*iv), GFP_KERNEL); in airq_iv_create()
  129 if (!iv) in airq_iv_create()
  131 iv->bits = bits; in airq_iv_create()
  133 iv->vector = kzalloc(size, GFP_KERNEL); in airq_iv_create()
  134 if (!iv->vector) in airq_iv_create()
  137 iv->avail = kmalloc(size, GFP_KERNEL); in airq_iv_create()
  138 if (!iv->avail) in airq_iv_create()
  140 memset(iv->avail, 0xff, size); in airq_iv_create()
  141 iv->end = 0; in airq_iv_create()
  [all …]
|
/Linux-v4.19/crypto/ |
D | testmgr.h |
  67 const char *iv; member
  83 const char *iv; member
  5788 .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10",
  5799 .iv = "\x12\x34\x56\x78\x90\xab\xcd\xef",
  5806 .iv = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c",
  5813 .iv = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f",
  5824 .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10",
  5837 .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
  5911 .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
  5981 .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
  [all …]
|
D | pcbc.c |
  54 u8 *iv = walk->iv; in crypto_pcbc_encrypt_segment() local
  57 crypto_xor(iv, src, bsize); in crypto_pcbc_encrypt_segment()
  58 crypto_cipher_encrypt_one(tfm, dst, iv); in crypto_pcbc_encrypt_segment()
  59 crypto_xor_cpy(iv, dst, src, bsize); in crypto_pcbc_encrypt_segment()
  75 u8 *iv = walk->iv; in crypto_pcbc_encrypt_inplace() local
  80 crypto_xor(iv, src, bsize); in crypto_pcbc_encrypt_inplace()
  81 crypto_cipher_encrypt_one(tfm, src, iv); in crypto_pcbc_encrypt_inplace()
  82 crypto_xor_cpy(iv, tmpbuf, src, bsize); in crypto_pcbc_encrypt_inplace()
  87 memcpy(walk->iv, iv, bsize); in crypto_pcbc_encrypt_inplace()
  124 u8 *iv = walk->iv; in crypto_pcbc_decrypt_segment() local
  [all …]
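The pcbc.c matches above are the whole PCBC chaining rule in three lines: the cipher input is the plaintext XORed with the running IV, and the next IV is plaintext XOR ciphertext of the same block. A minimal standalone C sketch of that rule; the one-block encrypt_block callback and helper names are illustrative stand-ins for the kernel's crypto_cipher API, not the real interface.

#include <stdint.h>
#include <stddef.h>

#define BSIZE 16

/* hypothetical one-block cipher, stand-in for crypto_cipher_encrypt_one() */
typedef void (*encrypt_block)(const void *key, uint8_t *dst, const uint8_t *src);

static void xor_block(uint8_t *dst, const uint8_t *a, const uint8_t *b)
{
	for (int i = 0; i < BSIZE; i++)
		dst[i] = a[i] ^ b[i];
}

/* PCBC: C_i = E(P_i ^ IV_i), then IV_{i+1} = P_i ^ C_i */
static void pcbc_encrypt(const void *key, encrypt_block enc, uint8_t *iv,
			 uint8_t *dst, const uint8_t *src, size_t nblocks)
{
	uint8_t tmp[BSIZE];

	for (size_t n = 0; n < nblocks; n++, src += BSIZE, dst += BSIZE) {
		xor_block(tmp, iv, src);   /* crypto_xor(iv, src, bsize) */
		enc(key, dst, tmp);        /* crypto_cipher_encrypt_one(tfm, dst, iv) */
		xor_block(iv, dst, src);   /* crypto_xor_cpy(iv, dst, src, bsize) */
	}
}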
|
D | cfb.c |
  61 u8 *iv = walk->iv; in crypto_cfb_final() local
  64 crypto_cfb_encrypt_one(tfm, iv, stream); in crypto_cfb_final()
  75 u8 *iv = walk->iv; in crypto_cfb_encrypt_segment() local
  78 crypto_cfb_encrypt_one(tfm, iv, dst); in crypto_cfb_encrypt_segment()
  80 memcpy(iv, dst, bsize); in crypto_cfb_encrypt_segment()
  95 u8 *iv = walk->iv; in crypto_cfb_encrypt_inplace() local
  99 crypto_cfb_encrypt_one(tfm, iv, tmp); in crypto_cfb_encrypt_inplace()
  101 iv = src; in crypto_cfb_encrypt_inplace()
  106 memcpy(walk->iv, iv, bsize); in crypto_cfb_encrypt_inplace()
  143 u8 *iv = walk->iv; in crypto_cfb_decrypt_segment() local
  [all …]
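For comparison, the cfb.c matches show the CFB rule: the previous ciphertext block (initially the IV) is run through the block cipher to make keystream, which is XORed with the plaintext, and the resulting ciphertext becomes the next IV. A compact sketch reusing the BSIZE and encrypt_block assumptions from the PCBC example above, plus <string.h> for memcpy:

/* CFB: C_i = P_i ^ E(IV), then IV = C_i
 * (mirrors crypto_cfb_encrypt_segment(): encrypt the IV into dst,
 *  XOR in the plaintext, copy dst back into the IV for the next block) */
static void cfb_encrypt(const void *key, encrypt_block enc, uint8_t *iv,
			uint8_t *dst, const uint8_t *src, size_t nblocks)
{
	for (size_t n = 0; n < nblocks; n++, src += BSIZE, dst += BSIZE) {
		enc(key, dst, iv);          /* crypto_cfb_encrypt_one(tfm, iv, dst) */
		for (int i = 0; i < BSIZE; i++)
			dst[i] ^= src[i];
		memcpy(iv, dst, BSIZE);     /* memcpy(iv, dst, bsize) */
	}
}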
|
/Linux-v4.19/arch/s390/include/asm/ |
D | airq.h |
  47 void airq_iv_release(struct airq_iv *iv);
  48 unsigned long airq_iv_alloc(struct airq_iv *iv, unsigned long num);
  49 void airq_iv_free(struct airq_iv *iv, unsigned long bit, unsigned long num);
  50 unsigned long airq_iv_scan(struct airq_iv *iv, unsigned long start,
  53 static inline unsigned long airq_iv_alloc_bit(struct airq_iv *iv) in airq_iv_alloc_bit() argument
  55 return airq_iv_alloc(iv, 1); in airq_iv_alloc_bit()
  58 static inline void airq_iv_free_bit(struct airq_iv *iv, unsigned long bit) in airq_iv_free_bit() argument
  60 airq_iv_free(iv, bit, 1); in airq_iv_free_bit()
  63 static inline unsigned long airq_iv_end(struct airq_iv *iv) in airq_iv_end() argument
  65 return iv->end; in airq_iv_end()
  [all …]
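Read together with the airq.c lines further up, these declarations give the shape of the s390 adapter-interrupt-vector API: create a vector of indicator bits, allocate and free individual bits, and scan for pending ones. A rough usage sketch follows; the exact airq_iv_create() argument list and the AIRQ_IV_ALLOC flag are not visible in the matches above and are assumptions here, so treat this as illustrative only.

/* sketch only: allocate one indicator bit, then release the vector again */
static int airq_iv_example(void)
{
	struct airq_iv *iv;
	unsigned long bit;

	iv = airq_iv_create(64, AIRQ_IV_ALLOC);   /* 64-bit vector, assumed signature */
	if (!iv)
		return -ENOMEM;

	bit = airq_iv_alloc_bit(iv);              /* == airq_iv_alloc(iv, 1) */
	if (bit == -1UL) {
		airq_iv_release(iv);
		return -ENOSPC;
	}

	/* ... use the bit as the adapter interruption indicator ... */

	airq_iv_free_bit(iv, bit);                /* == airq_iv_free(iv, bit, 1) */
	airq_iv_release(iv);
	return 0;
}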
|
/Linux-v4.19/include/crypto/ |
D | cbc.h |
  28 u8 *iv = walk->iv; in crypto_cbc_encrypt_segment() local
  31 crypto_xor(iv, src, bsize); in crypto_cbc_encrypt_segment()
  32 fn(tfm, iv, dst); in crypto_cbc_encrypt_segment()
  33 memcpy(iv, dst, bsize); in crypto_cbc_encrypt_segment()
  49 u8 *iv = walk->iv; in crypto_cbc_encrypt_inplace() local
  52 crypto_xor(src, iv, bsize); in crypto_cbc_encrypt_inplace()
  54 iv = src; in crypto_cbc_encrypt_inplace()
  59 memcpy(walk->iv, iv, bsize); in crypto_cbc_encrypt_inplace()
  93 u8 *iv = walk->iv; in crypto_cbc_decrypt_segment() local
  97 crypto_xor(dst, iv, bsize); in crypto_cbc_decrypt_segment()
  [all …]
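The cbc.h helper is plain CBC: C_i = E(P_i XOR IV), with the new IV being the ciphertext just produced. A short sketch under the same assumptions as the PCBC and CFB examples above (the kernel version threads a cipher callback and tfm through instead of a key pointer):

/* CBC: C_i = E(P_i ^ IV), then IV = C_i
 * (mirrors crypto_cbc_encrypt_segment(): crypto_xor, fn(), memcpy) */
static void cbc_encrypt(const void *key, encrypt_block fn, uint8_t *iv,
			uint8_t *dst, const uint8_t *src, size_t nblocks)
{
	for (size_t n = 0; n < nblocks; n++, src += BSIZE, dst += BSIZE) {
		xor_block(iv, iv, src);   /* crypto_xor(iv, src, bsize) */
		fn(key, dst, iv);         /* fn(tfm, iv, dst) */
		memcpy(iv, dst, BSIZE);   /* chain this ciphertext into the next IV */
	}
}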
|
/Linux-v4.19/drivers/staging/wlan-ng/ |
D | p80211wep.c |
  145 u8 *iv, u8 *icv) in wep_decrypt() argument
  156 key[0] = iv[0]; in wep_decrypt()
  157 key[1] = iv[1]; in wep_decrypt()
  158 key[2] = iv[2]; in wep_decrypt()
  159 keyidx = WEP_KEY(iv[3]); in wep_decrypt()
  218 u8 *dst, u32 len, int keynum, u8 *iv, u8 *icv) in wep_encrypt() argument
  235 get_random_bytes(iv, 3); in wep_encrypt()
  236 while ((iv[1] == 0xff) && (iv[0] >= 3) && (iv[0] < keylen)) in wep_encrypt()
  237 get_random_bytes(iv, 3); in wep_encrypt()
  239 iv[3] = (keynum & 0x03) << 6; in wep_encrypt()
  [all …]
|
/Linux-v4.19/arch/x86/crypto/ |
D | glue_helper-asm-avx2.S |
  60 #define load_ctr_16way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t0x, t1, \ argument
  67 vmovdqu (iv), t2x; \
  91 vmovdqu t2x, (iv);
  104 #define gf128mul_x_ble(iv, mask, tmp) \ argument
  105 vpsrad $31, iv, tmp; \
  106 vpaddq iv, iv, iv; \
  109 vpxor tmp, iv, iv;
  111 #define gf128mul_x2_ble(iv, mask1, mask2, tmp0, tmp1) \ argument
  112 vpsrad $31, iv, tmp0; \
  113 vpaddq iv, iv, tmp1; \
  [all …]
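The gf128mul_x_ble macro above is the XTS tweak update: multiply the 128-bit tweak by x in GF(2^128), kept in little-endian block order, folding the bit that shifts out of the top back in with the reduction constant 0x87 (the lines elided from the matches build that reduction mask). A scalar C equivalent; the two-word tweak layout below is an assumption for the sketch, not the kernel's le128 type.

#include <stdint.h>

/* double a 128-bit XTS tweak in GF(2^128), little-endian block order,
 * reducing by 0x87 when bit 127 falls out (scalar gf128mul_x_ble()) */
struct tweak128 { uint64_t a, b; };   /* a = low 64 bits, b = high 64 bits */

static void tweak_mul_x(struct tweak128 *t)
{
	uint64_t carry = t->b >> 63;            /* bit 127 about to shift out */

	t->b = (t->b << 1) | (t->a >> 63);
	t->a = (t->a << 1) ^ (carry * 0x87);    /* conditional reduction */
}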
|
D | aesni-intel_glue.c |
  101 const u8 *in, unsigned int len, u8 *iv);
  103 const u8 *in, unsigned int len, u8 *iv);
  114 const u8 *in, unsigned int len, u8 *iv);
  116 const u8 *in, unsigned int len, u8 *iv);
  119 const u8 *in, bool enc, u8 *iv);
  138 const u8 *in, unsigned long plaintext_len, u8 *iv,
  160 const u8 *in, unsigned long ciphertext_len, u8 *iv,
  167 u8 *iv,
  182 asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
  184 asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
  [all …]
|
D | glue_helper.c |
  94 u128 *iv = (u128 *)walk.iv; in glue_cbc_encrypt_req_128bit() local
  97 u128_xor(dst, src, iv); in glue_cbc_encrypt_req_128bit()
  99 iv = dst; in glue_cbc_encrypt_req_128bit()
  105 *(u128 *)walk.iv = *iv; in glue_cbc_encrypt_req_128bit()
  162 u128_xor(dst, dst, (u128 *)walk.iv); in glue_cbc_decrypt_req_128bit()
  163 *(u128 *)walk.iv = last_iv; in glue_cbc_decrypt_req_128bit()
  194 be128_to_le128(&ctrblk, (be128 *)walk.iv); in glue_ctr_req_128bit()
  215 le128_to_be128((be128 *)walk.iv, &ctrblk); in glue_ctr_req_128bit()
  225 be128_to_le128(&ctrblk, (be128 *)walk.iv); in glue_ctr_req_128bit()
  230 le128_to_be128((be128 *)walk.iv, &ctrblk); in glue_ctr_req_128bit()
  [all …]
|
D | glue_helper-asm-avx.S |
  54 #define load_ctr_8way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2) \ argument
  60 vmovdqu (iv), x7; \
  80 vmovdqu t2, (iv);
  93 #define gf128mul_x_ble(iv, mask, tmp) \ argument
  94 vpsrad $31, iv, tmp; \
  95 vpaddq iv, iv, iv; \
  98 vpxor tmp, iv, iv;
  100 #define load_xts_8way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, t0, \ argument
  105 vmovdqu (iv), tiv; \
  139 vmovdqu tiv, (iv);
|
D | twofish_glue_3way.c |
  69 void twofish_enc_blk_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) in twofish_enc_blk_ctr() argument
  76 le128_to_be128(&ctrblk, iv); in twofish_enc_blk_ctr()
  77 le128_inc(iv); in twofish_enc_blk_ctr()
  85 le128 *iv) in twofish_enc_blk_ctr_3way() argument
  95 le128_to_be128(&ctrblks[0], iv); in twofish_enc_blk_ctr_3way()
  96 le128_inc(iv); in twofish_enc_blk_ctr_3way()
  97 le128_to_be128(&ctrblks[1], iv); in twofish_enc_blk_ctr_3way()
  98 le128_inc(iv); in twofish_enc_blk_ctr_3way()
  99 le128_to_be128(&ctrblks[2], iv); in twofish_enc_blk_ctr_3way()
  100 le128_inc(iv); in twofish_enc_blk_ctr_3way()
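The CTR helpers above convert the little-endian 128-bit counter to big-endian for the cipher and bump it once per block with le128_inc(). A sketch of that increment; the two-word counter layout is an assumption for illustration, not the kernel's le128 definition.

#include <stdint.h>

/* 128-bit counter increment, one call per block: bump the low word,
 * carry into the high word on wrap (a = low, b = high, assumed layout) */
struct ctr128 { uint64_t a, b; };

static void ctr128_inc(struct ctr128 *c)
{
	if (++c->a == 0)
		c->b++;
}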
|
/Linux-v4.19/arch/arm64/crypto/ |
D | ghash-ce-glue.c |
  374 u8 iv[AES_BLOCK_SIZE]; in gcm_encrypt() local
  384 memcpy(iv, req->iv, GCM_IV_SIZE); in gcm_encrypt()
  385 put_unaligned_be32(1, iv + GCM_IV_SIZE); in gcm_encrypt()
  393 pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc, nrounds); in gcm_encrypt()
  394 put_unaligned_be32(2, iv + GCM_IV_SIZE); in gcm_encrypt()
  395 pmull_gcm_encrypt_block(ks, iv, NULL, nrounds); in gcm_encrypt()
  396 put_unaligned_be32(3, iv + GCM_IV_SIZE); in gcm_encrypt()
  397 pmull_gcm_encrypt_block(ks + AES_BLOCK_SIZE, iv, NULL, nrounds); in gcm_encrypt()
  398 put_unaligned_be32(4, iv + GCM_IV_SIZE); in gcm_encrypt()
  408 iv, rk, nrounds, ks); in gcm_encrypt()
  [all …]
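The gcm_encrypt() lines show how the GCM counter blocks are derived from the request IV: the 96-bit IV is copied in and a 32-bit big-endian counter is appended, with counter value 1 reserved for the block that masks the tag and values 2, 3, 4, ... producing keystream. A small sketch of that layout; the names are illustrative, not the kernel's.

#include <stdint.h>
#include <string.h>

#define GCM_IV_SIZE    12
#define AES_BLOCK_SIZE 16

/* counter block = 96-bit IV || 32-bit big-endian counter,
 * i.e. what the memcpy() + put_unaligned_be32() pairs above build */
static void gcm_counter_block(uint8_t block[AES_BLOCK_SIZE],
			      const uint8_t iv[GCM_IV_SIZE], uint32_t counter)
{
	memcpy(block, iv, GCM_IV_SIZE);
	block[12] = counter >> 24;
	block[13] = counter >> 16;
	block[14] = counter >> 8;
	block[15] = counter;
}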
|
/Linux-v4.19/drivers/crypto/nx/ |
D | nx-aes-ccm.c |
  135 static inline int crypto_ccm_check_iv(const u8 *iv) in crypto_ccm_check_iv() argument
  138 if (1 > iv[0] || iv[0] > 7) in crypto_ccm_check_iv()
  145 static int generate_b0(u8 *iv, unsigned int assoclen, unsigned int authsize, in generate_b0() argument
  151 memcpy(b0, iv, 16); in generate_b0()
  168 static int generate_pat(u8 *iv, in generate_pat() argument
  184 memset(iv + 15 - iv[0], 0, iv[0] + 1); in generate_pat()
  223 rc = generate_b0(iv, assoclen, authsize, nbytes, b0); in generate_pat()
  497 u8 *iv = rctx->iv; in ccm4309_aes_nx_encrypt() local
  499 iv[0] = 3; in ccm4309_aes_nx_encrypt()
  500 memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3); in ccm4309_aes_nx_encrypt()
  [all …]
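In CCM the first IV byte carries L' = L - 1, the size of the length/counter field minus one, which is why crypto_ccm_check_iv() rejects values outside 1..7 and why generate_pat() zeroes the trailing iv[0] + 1 bytes to reset the counter field. A sketch of that counter-block setup; the function and parameter names are illustrative.

#include <stdint.h>
#include <string.h>

/* build the CCM A0 counter block from a flags+nonce IV:
 * reject L' outside 1..7, then clear the trailing counter field */
static int ccm_init_counter(uint8_t a0[16], const uint8_t iv[16])
{
	uint8_t lprime = iv[0];

	if (lprime < 1 || lprime > 7)             /* crypto_ccm_check_iv() */
		return -1;

	memcpy(a0, iv, 16);
	memset(a0 + 15 - lprime, 0, lprime + 1);  /* generate_pat()'s memset */
	return 0;
}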
|
D | nx-aes-ctr.c |
  101 processed, csbcpb->cpb.aes_ctr.iv); in ctr_aes_nx_crypt()
  134 u8 iv[16]; in ctr3686_aes_nx_crypt() local
  136 memcpy(iv, nx_ctx->priv.ctr.nonce, CTR_RFC3686_IV_SIZE); in ctr3686_aes_nx_crypt()
  137 memcpy(iv + CTR_RFC3686_NONCE_SIZE, in ctr3686_aes_nx_crypt()
  139 iv[12] = iv[13] = iv[14] = 0; in ctr3686_aes_nx_crypt()
  140 iv[15] = 1; in ctr3686_aes_nx_crypt()
  142 desc->info = iv; in ctr3686_aes_nx_crypt()
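ctr3686_aes_nx_crypt() assembles the RFC 3686 counter block: a 4-byte key-derived nonce, the 8-byte per-request IV, and a 32-bit big-endian block counter that starts at 1. A sketch of the intended layout (the listing copies the nonce with the larger IV size, but the following memcpy overwrites bytes 4..11, so the net result is the same); constant names here are local to the sketch.

#include <stdint.h>
#include <string.h>

#define RFC3686_NONCE_SIZE 4
#define RFC3686_IV_SIZE    8

/* RFC 3686 CTR block: nonce | per-request IV | 32-bit BE counter = 1 */
static void rfc3686_counter_block(uint8_t block[16],
				  const uint8_t nonce[RFC3686_NONCE_SIZE],
				  const uint8_t req_iv[RFC3686_IV_SIZE])
{
	memcpy(block, nonce, RFC3686_NONCE_SIZE);
	memcpy(block + RFC3686_NONCE_SIZE, req_iv, RFC3686_IV_SIZE);
	block[12] = block[13] = block[14] = 0;
	block[15] = 1;
}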
|
D | nx-aes-gcm.c |
  336 desc.info = rctx->iv; in gcm_aes_nx_crypt()
  435 char *iv = rctx->iv; in gcm_aes_nx_encrypt() local
  437 memcpy(iv, req->iv, GCM_AES_IV_SIZE); in gcm_aes_nx_encrypt()
  445 char *iv = rctx->iv; in gcm_aes_nx_decrypt() local
  447 memcpy(iv, req->iv, GCM_AES_IV_SIZE); in gcm_aes_nx_decrypt()
  457 char *iv = rctx->iv; in gcm4106_aes_nx_encrypt() local
  460 memcpy(iv, nonce, NX_GCM4106_NONCE_LEN); in gcm4106_aes_nx_encrypt()
  461 memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8); in gcm4106_aes_nx_encrypt()
  474 char *iv = rctx->iv; in gcm4106_aes_nx_decrypt() local
  477 memcpy(iv, nonce, NX_GCM4106_NONCE_LEN); in gcm4106_aes_nx_decrypt()
  [all …]
|
/Linux-v4.19/block/ |
D | blk-integrity.c |
  43 struct bio_vec iv, ivprv = { NULL }; in blk_rq_count_integrity_sg() local
  49 bio_for_each_integrity_vec(iv, bio, iter) { in blk_rq_count_integrity_sg()
  52 if (!BIOVEC_PHYS_MERGEABLE(&ivprv, &iv)) in blk_rq_count_integrity_sg()
  55 if (!BIOVEC_SEG_BOUNDARY(q, &ivprv, &iv)) in blk_rq_count_integrity_sg()
  58 if (seg_size + iv.bv_len > queue_max_segment_size(q)) in blk_rq_count_integrity_sg()
  61 seg_size += iv.bv_len; in blk_rq_count_integrity_sg()
  65 seg_size = iv.bv_len; in blk_rq_count_integrity_sg()
  69 ivprv = iv; in blk_rq_count_integrity_sg()
  89 struct bio_vec iv, ivprv = { NULL }; in blk_rq_map_integrity_sg() local
  95 bio_for_each_integrity_vec(iv, bio, iter) { in blk_rq_map_integrity_sg()
  [all …]
|
D | t10-pi.c |
  213 struct bio_vec iv; in t10_pi_prepare() local
  220 bip_for_each_vec(iv, bip, iter) { in t10_pi_prepare()
  224 pmap = kmap_atomic(iv.bv_page); in t10_pi_prepare()
  225 p = pmap + iv.bv_offset; in t10_pi_prepare()
  226 for (j = 0; j < iv.bv_len; j += tuple_sz) { in t10_pi_prepare()
  272 struct bio_vec iv; in t10_pi_complete() local
  275 bip_for_each_vec(iv, bip, iter) { in t10_pi_complete()
  279 pmap = kmap_atomic(iv.bv_page); in t10_pi_complete()
  280 p = pmap + iv.bv_offset; in t10_pi_complete()
  281 for (j = 0; j < iv.bv_len && intervals; j += tuple_sz) { in t10_pi_complete()
|
/Linux-v4.19/drivers/crypto/amcc/ |
D | crypto4xx_alg.c |
  83 __le32 iv[AES_IV_SIZE]; in crypto4xx_crypt() local
  86 crypto4xx_memcpy_to_le32(iv, req->iv, ivlen); in crypto4xx_crypt()
  89 req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out, in crypto4xx_crypt()
  218 __le32 iv[AES_IV_SIZE / 4] = { in crypto4xx_rfc3686_encrypt() local
  220 cpu_to_le32p((u32 *) req->iv), in crypto4xx_rfc3686_encrypt()
  221 cpu_to_le32p((u32 *) (req->iv + 4)), in crypto4xx_rfc3686_encrypt()
  225 req->cryptlen, iv, AES_IV_SIZE, in crypto4xx_rfc3686_encrypt()
  233 __le32 iv[AES_IV_SIZE / 4] = { in crypto4xx_rfc3686_decrypt() local
  235 cpu_to_le32p((u32 *) req->iv), in crypto4xx_rfc3686_decrypt()
  236 cpu_to_le32p((u32 *) (req->iv + 4)), in crypto4xx_rfc3686_decrypt()
  [all …]
|
/Linux-v4.19/arch/x86/include/asm/crypto/ |
D | serpent-avx.h |
  26 const u8 *src, le128 *iv);
  29 const u8 *src, le128 *iv);
  31 const u8 *src, le128 *iv);
  34 le128 *iv);
  36 extern void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv);
  37 extern void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv);
|
D | camellia.h |
  55 const u8 *src, le128 *iv);
  58 const u8 *src, le128 *iv);
  60 const u8 *src, le128 *iv);
  89 le128 *iv);
  91 le128 *iv);
  93 extern void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv);
  94 extern void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv);
|
/Linux-v4.19/net/mac80211/ |
D | wep.c |
  57 static inline bool ieee80211_wep_weak_iv(u32 iv, int keylen) in ieee80211_wep_weak_iv() argument
  64 if ((iv & 0xff00) == 0xff00) { in ieee80211_wep_weak_iv()
  65 u8 B = (iv >> 16) & 0xff; in ieee80211_wep_weak_iv()
  74 int keylen, int keyidx, u8 *iv) in ieee80211_wep_get_iv() argument
  80 if (!iv) in ieee80211_wep_get_iv()
  83 *iv++ = (local->wep_iv >> 16) & 0xff; in ieee80211_wep_get_iv()
  84 *iv++ = (local->wep_iv >> 8) & 0xff; in ieee80211_wep_get_iv()
  85 *iv++ = local->wep_iv & 0xff; in ieee80211_wep_get_iv()
  86 *iv++ = keyidx << 6; in ieee80211_wep_get_iv()
  165 u8 *iv; in ieee80211_wep_encrypt() local
  [all …]
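The wep.c matches show the two halves of WEP IV handling: ieee80211_wep_get_iv() emits the 24-bit IV counter as three bytes plus the key index in the top bits of the fourth byte, and ieee80211_wep_weak_iv() skips FMS-weak IVs of the form (B + 3, 0xff, x), which leak RC4 key byte B. A sketch of the weak-IV test; the full range check on B is not visible in the matches above, so the condition used here is an assumption.

#include <stdbool.h>
#include <stdint.h>

/* FMS weak-IV test: middle byte 0xff and a first byte that targets a
 * real key byte make the IV unsafe to transmit */
static bool wep_weak_iv(uint32_t iv, int keylen)
{
	if ((iv & 0xff00) == 0xff00) {          /* middle byte is 0xff */
		uint8_t B = (iv >> 16) & 0xff;  /* first on-air IV byte */

		if (B >= 3 && B < 3 + keylen)   /* assumed range check */
			return true;
	}
	return false;
}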
|
D | aes_gmac.c |
  24 u8 *zero, *__aad, iv[AES_BLOCK_SIZE]; in ieee80211_aes_gmac() local
  45 memcpy(iv, nonce, GMAC_NONCE_LEN); in ieee80211_aes_gmac()
  46 memset(iv + GMAC_NONCE_LEN, 0, sizeof(iv) - GMAC_NONCE_LEN); in ieee80211_aes_gmac()
  47 iv[AES_BLOCK_SIZE - 1] = 0x01; in ieee80211_aes_gmac()
  50 aead_request_set_crypt(aead_req, sg, sg, 0, iv); in ieee80211_aes_gmac()
|
/Linux-v4.19/net/rxrpc/ |
D | rxkad.c |
  109 struct rxrpc_crypt iv; in rxkad_prime_packet_security() local
  123 memcpy(&iv, token->kad->session_key, sizeof(iv)); in rxkad_prime_packet_security()
  133 skcipher_request_set_crypt(req, &sg, &sg, tmpsize, iv.x); in rxkad_prime_packet_security()
  154 struct rxrpc_crypt iv; in rxkad_secure_packet_auth() local
  167 memset(&iv, 0, sizeof(iv)); in rxkad_secure_packet_auth()
  172 skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x); in rxkad_secure_packet_auth()
  192 struct rxrpc_crypt iv; in rxkad_secure_packet_encrypt() local
  212 memcpy(&iv, token->kad->session_key, sizeof(iv)); in rxkad_secure_packet_encrypt()
  217 skcipher_request_set_crypt(req, &sg[0], &sg[0], sizeof(rxkhdr), iv.x); in rxkad_secure_packet_encrypt()
  233 skcipher_request_set_crypt(req, sg, sg, len, iv.x); in rxkad_secure_packet_encrypt()
  [all …]
|
/Linux-v4.19/drivers/ssb/ |
D | host_soc.c |
  177 struct ssb_init_invariants *iv) in ssb_host_soc_get_invariants() argument
  183 memset(&iv->boardinfo, 0, sizeof(struct ssb_boardinfo)); in ssb_host_soc_get_invariants()
  187 err = kstrtou16(strim(buf), 0, &iv->boardinfo.vendor); in ssb_host_soc_get_invariants()
  192 if (!iv->boardinfo.vendor) in ssb_host_soc_get_invariants()
  193 iv->boardinfo.vendor = SSB_BOARDVENDOR_BCM; in ssb_host_soc_get_invariants()
  197 err = kstrtou16(strim(buf), 0, &iv->boardinfo.type); in ssb_host_soc_get_invariants()
  203 memset(&iv->sprom, 0, sizeof(struct ssb_sprom)); in ssb_host_soc_get_invariants()
  204 ssb_fill_sprom_with_fallback(bus, &iv->sprom); in ssb_host_soc_get_invariants()
  207 iv->has_cardbus_slot = !!simple_strtoul(buf, NULL, 10); in ssb_host_soc_get_invariants()
|