| /Linux-v5.15/drivers/crypto/vmx/ |
| D | aes.c |
      23  struct crypto_cipher *fallback;  member
      31  struct crypto_cipher *fallback;  in p8_aes_init() local
      34  fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);  in p8_aes_init()
      35  if (IS_ERR(fallback)) {  in p8_aes_init()
      38  alg, PTR_ERR(fallback));  in p8_aes_init()
      39  return PTR_ERR(fallback);  in p8_aes_init()
      42  crypto_cipher_set_flags(fallback,  in p8_aes_init()
      46  ctx->fallback = fallback;  in p8_aes_init()
      55  if (ctx->fallback) {  in p8_aes_exit()
      56  crypto_free_cipher(ctx->fallback);  in p8_aes_exit()
      [all …]
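The aes.c hits all belong to one pattern: the VMX driver allocates a generic software cipher with CRYPTO_ALG_NEED_FALLBACK at tfm init time, mirrors the wrapper's flags onto it, and frees it at exit, so requests the accelerated path cannot handle can be delegated. A condensed sketch of that init/exit pair, assuming a context struct reduced to the fallback member; error messages, the accelerated code paths, and exact header choices are illustrative rather than a verbatim copy of drivers/crypto/vmx/aes.c:

```c
/* Sketch of the fallback-cipher lifecycle implied by the hits above. */
#include <crypto/internal/cipher.h>
#include <linux/crypto.h>
#include <linux/err.h>

struct p8_aes_ctx {
	struct crypto_cipher *fallback;
};

static int p8_aes_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *fallback;

	/* Ask the crypto API for another implementation of the same algorithm. */
	fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback))
		return PTR_ERR(fallback);

	/* Mirror the wrapper's flags onto the fallback transform. */
	crypto_cipher_set_flags(fallback,
				crypto_cipher_get_flags((struct crypto_cipher *)tfm));
	ctx->fallback = fallback;
	return 0;
}

static void p8_aes_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_cipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}
```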
|
| D | aes_cbc.c |
      19  struct crypto_skcipher *fallback;  member
      27  struct crypto_skcipher *fallback;  in p8_aes_cbc_init() local
      29  fallback = crypto_alloc_skcipher("cbc(aes)", 0,  in p8_aes_cbc_init()
      32  if (IS_ERR(fallback)) {  in p8_aes_cbc_init()
      34  PTR_ERR(fallback));  in p8_aes_cbc_init()
      35  return PTR_ERR(fallback);  in p8_aes_cbc_init()
      39  crypto_skcipher_reqsize(fallback));  in p8_aes_cbc_init()
      40  ctx->fallback = fallback;  in p8_aes_cbc_init()
      48  crypto_free_skcipher(ctx->fallback);  in p8_aes_cbc_exit()
      66  ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);  in p8_aes_cbc_setkey()
      [all …]
|
| D | aes_ctr.c |
      19  struct crypto_skcipher *fallback;  member
      26  struct crypto_skcipher *fallback;  in p8_aes_ctr_init() local
      28  fallback = crypto_alloc_skcipher("ctr(aes)", 0,  in p8_aes_ctr_init()
      31  if (IS_ERR(fallback)) {  in p8_aes_ctr_init()
      33  PTR_ERR(fallback));  in p8_aes_ctr_init()
      34  return PTR_ERR(fallback);  in p8_aes_ctr_init()
      38  crypto_skcipher_reqsize(fallback));  in p8_aes_ctr_init()
      39  ctx->fallback = fallback;  in p8_aes_ctr_init()
      47  crypto_free_skcipher(ctx->fallback);  in p8_aes_ctr_exit()
      64  ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);  in p8_aes_ctr_setkey()
      [all …]
|
| D | aes_xts.c |
      20  struct crypto_skcipher *fallback;  member
      29  struct crypto_skcipher *fallback;  in p8_aes_xts_init() local
      31  fallback = crypto_alloc_skcipher("xts(aes)", 0,  in p8_aes_xts_init()
      34  if (IS_ERR(fallback)) {  in p8_aes_xts_init()
      36  PTR_ERR(fallback));  in p8_aes_xts_init()
      37  return PTR_ERR(fallback);  in p8_aes_xts_init()
      41  crypto_skcipher_reqsize(fallback));  in p8_aes_xts_init()
      42  ctx->fallback = fallback;  in p8_aes_xts_init()
      50  crypto_free_skcipher(ctx->fallback);  in p8_aes_xts_exit()
      73  ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);  in p8_aes_xts_setkey()
      [all …]
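The aes_cbc.c, aes_ctr.c and aes_xts.c variants use a crypto_skcipher fallback instead of a crypto_cipher: the init hook allocates the named mode with CRYPTO_ALG_NEED_FALLBACK, reserves extra per-request space for a nested request via crypto_skcipher_set_reqsize(), and every setkey also programs the fallback so both paths hold the same key. A hedged sketch of that shape (context layout, flag mask and the hardware key programming are illustrative):

```c
/* Sketch of the skcipher-fallback init/setkey pattern seen above. */
#include <crypto/internal/skcipher.h>
#include <linux/err.h>

struct p8_aes_xts_ctx {
	struct crypto_skcipher *fallback;
};

static int p8_aes_xts_init(struct crypto_skcipher *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

	fallback = crypto_alloc_skcipher("xts(aes)", 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback))
		return PTR_ERR(fallback);

	/* Reserve room in each request for a nested request to the fallback. */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(fallback));
	ctx->fallback = fallback;
	return 0;
}

static int p8_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret = 0;

	/* Hardware key schedule setup omitted; keep the software fallback's
	 * key in sync so either path can serve a given request.
	 */
	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
	return ret ? -EINVAL : 0;
}
```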
|
| /Linux-v5.15/drivers/firmware/broadcom/ |
| D | bcm47xx_sprom.c |
      50   const char *name, char *buf, int len, bool fallback)  in get_nvram_var() argument
      58   if (fallback && err == -ENOENT && prefix) {  in get_nvram_var()
      68   type *val, type allset, bool fallback) \
      75   fallback); \
      97   u16 *val_lo, u16 *val_hi, bool fallback)  in NVRAM_READ_VAL()
      103  err = get_nvram_var(prefix, NULL, name, buf, sizeof(buf), fallback);  in NVRAM_READ_VAL()
      118  bool fallback)  in nvram_read_leddc() argument
      124  err = get_nvram_var(prefix, NULL, name, buf, sizeof(buf), fallback);  in nvram_read_leddc()
      142  u8 val[6], bool fallback)  in nvram_read_macaddr() argument
      147  err = get_nvram_var(prefix, NULL, name, buf, sizeof(buf), fallback);  in nvram_read_macaddr()
      [all …]
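Here the fallback is a lookup policy rather than a crypto transform: get_nvram_var() first tries the board-prefixed NVRAM variable and, when the caller allows it and the prefixed key is simply absent (-ENOENT), retries without the prefix. A simplified sketch, with the driver's create_key()/postfix handling replaced by a plain snprintf, so the names and key format are illustrative:

```c
/* Sketch of the prefixed/unprefixed NVRAM lookup implied by the hits above. */
#include <linux/bcm47xx_nvram.h>
#include <linux/errno.h>
#include <linux/kernel.h>

static int get_nvram_var(const char *prefix, const char *name,
			 char *buf, int len, bool fallback)
{
	char key[40];
	int err;

	snprintf(key, sizeof(key), "%s%s", prefix ? prefix : "", name);
	err = bcm47xx_nvram_getenv(key, buf, len);

	/* Retry without the device prefix only if the caller asked for a
	 * fallback and the prefixed variable does not exist at all.
	 */
	if (fallback && err == -ENOENT && prefix)
		err = bcm47xx_nvram_getenv(name, buf, len);

	return err;
}
```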
|
| /Linux-v5.15/drivers/crypto/ |
| D | geode-aes.c |
      129  tctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;  in geode_setkey_cip()
      130  tctx->fallback.cip->base.crt_flags |=  in geode_setkey_cip()
      133  return crypto_cipher_setkey(tctx->fallback.cip, key, len);  in geode_setkey_cip()
      155  crypto_skcipher_clear_flags(tctx->fallback.skcipher,  in geode_setkey_skcipher()
      157  crypto_skcipher_set_flags(tctx->fallback.skcipher,  in geode_setkey_skcipher()
      160  return crypto_skcipher_setkey(tctx->fallback.skcipher, key, len);  in geode_setkey_skcipher()
      169  crypto_cipher_encrypt_one(tctx->fallback.cip, out, in);  in geode_encrypt()
      184  crypto_cipher_decrypt_one(tctx->fallback.cip, out, in);  in geode_decrypt()
      197  tctx->fallback.cip = crypto_alloc_cipher(name, 0,  in fallback_init_cip()
      200  if (IS_ERR(tctx->fallback.cip)) {  in fallback_init_cip()
      [all …]
|
| D | atmel-ecc.c |
      42   struct crypto_kpp *fallback;  member
      99   return crypto_kpp_set_secret(ctx->fallback, buf, len);  in atmel_ecdh_set_secret()
      145  kpp_request_set_tfm(req, ctx->fallback);  in atmel_ecdh_generate_public_key()
      174  kpp_request_set_tfm(req, ctx->fallback);  in atmel_ecdh_compute_shared_secret()
      250  struct crypto_kpp *fallback;  in atmel_ecdh_init_tfm() local
      260  fallback = crypto_alloc_kpp(alg, 0, CRYPTO_ALG_NEED_FALLBACK);  in atmel_ecdh_init_tfm()
      261  if (IS_ERR(fallback)) {  in atmel_ecdh_init_tfm()
      263  alg, PTR_ERR(fallback));  in atmel_ecdh_init_tfm()
      264  return PTR_ERR(fallback);  in atmel_ecdh_init_tfm()
      267  crypto_kpp_set_flags(fallback, crypto_kpp_get_flags(tfm));  in atmel_ecdh_init_tfm()
      [all …]
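atmel-ecc.c shows the same idea for a key-agreement (kpp) transform: the ECDH fallback is allocated at tfm init with CRYPTO_ALG_NEED_FALLBACK, receives the secret in set_secret, and whole requests are re-targeted to it with kpp_request_set_tfm() when they cannot be handled in hardware. A hedged sketch; struct names, error paths and the "which requests go to hardware" policy are illustrative:

```c
/* Sketch of the kpp fallback flow suggested by the atmel-ecc.c hits. */
#include <crypto/kpp.h>
#include <linux/err.h>

struct atmel_ecdh_ctx {
	struct crypto_kpp *fallback;
};

static int atmel_ecdh_init_tfm(struct crypto_kpp *tfm)
{
	const char *alg = crypto_tfm_alg_name(crypto_kpp_tfm(tfm));
	struct atmel_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);
	struct crypto_kpp *fallback;

	fallback = crypto_alloc_kpp(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback))
		return PTR_ERR(fallback);

	crypto_kpp_set_flags(fallback, crypto_kpp_get_flags(tfm));
	ctx->fallback = fallback;
	return 0;
}

static int atmel_ecdh_generate_public_key(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct atmel_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);

	/* Hand the entire request to the software implementation. */
	kpp_request_set_tfm(req, ctx->fallback);
	return crypto_kpp_generate_public_key(req);
}
```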
|
| D | padlock-sha.c |
      25  struct shash_desc fallback;  member
      29  struct crypto_shash *fallback;  member
      37  dctx->fallback.tfm = ctx->fallback;  in padlock_sha_init()
      38  return crypto_shash_init(&dctx->fallback);  in padlock_sha_init()
      46  return crypto_shash_update(&dctx->fallback, data, length);  in padlock_sha_update()
      53  return crypto_shash_export(&dctx->fallback, out);  in padlock_sha_export()
      61  dctx->fallback.tfm = ctx->fallback;  in padlock_sha_import()
      62  return crypto_shash_import(&dctx->fallback, in);  in padlock_sha_import()
      87  err = crypto_shash_export(&dctx->fallback, &state);  in padlock_sha1_finup()
      92  return crypto_shash_finup(&dctx->fallback, in, count, out);  in padlock_sha1_finup()
      [all …]
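padlock-sha keeps two pieces of state: a crypto_shash handle in the tfm context and an embedded shash_desc in each request descriptor. The hash callbacks simply bind the per-request descriptor to the fallback tfm and delegate. A hedged sketch of that delegation; the driver's descsize accounting at algorithm registration time is omitted:

```c
/* Sketch of the shash delegation pattern visible in the padlock-sha.c hits. */
#include <crypto/internal/hash.h>

struct padlock_sha_desc {
	struct shash_desc fallback;	/* per-request fallback descriptor */
};

struct padlock_sha_ctx {
	struct crypto_shash *fallback;	/* per-tfm fallback transform */
};

static int padlock_sha_init(struct shash_desc *desc)
{
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
	struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm);

	/* Bind this request's descriptor to the software implementation ... */
	dctx->fallback.tfm = ctx->fallback;
	/* ... and let it do the work. */
	return crypto_shash_init(&dctx->fallback);
}

static int padlock_sha_update(struct shash_desc *desc,
			      const u8 *data, unsigned int length)
{
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);

	return crypto_shash_update(&dctx->fallback, data, length);
}

static int padlock_sha_export(struct shash_desc *desc, void *out)
{
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);

	return crypto_shash_export(&dctx->fallback, out);
}
```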
|
| D | sa2ul.c |
      844   crypto_free_skcipher(ctx->fallback.skcipher);  in sa_cipher_cra_exit()
      874   ctx->fallback.skcipher = child;  in sa_cipher_cra_init()
      888   struct crypto_skcipher *child = ctx->fallback.skcipher;  in sa_cipher_setkey()
      1315  skcipher_request_set_tfm(subreq, ctx->fallback.skcipher);  in sa_cipher_run()
      1419  ahash_request_set_tfm(subreq, ctx->fallback.ahash);  in sa_sha_run()
      1512  ctx->fallback.ahash =  in sa_sha_cra_init_alg()
      1515  if (IS_ERR(ctx->fallback.ahash)) {  in sa_sha_cra_init_alg()
      1518  return PTR_ERR(ctx->fallback.ahash);  in sa_sha_cra_init_alg()
      1528  crypto_ahash_reqsize(ctx->fallback.ahash));  in sa_sha_cra_init_alg()
      1547  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);  in sa_sha_init()
      [all …]
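sa2ul carries both a skcipher and an ahash fallback in its tfm context; the hash side allocates the fallback at cra_init, sizes the request context so a nested ahash_request fits at the end, and each sa_sha_init() points that nested request at the fallback transform. A hedged sketch of the ahash wiring only, with names like sa_sha_req_ctx, the algorithm string and the callback setup treated as illustrative assumptions:

```c
/* Sketch of the ahash-fallback setup suggested by the sa2ul.c hits. */
#include <crypto/internal/hash.h>
#include <linux/err.h>

struct sa_tfm_ctx {
	struct {
		struct crypto_ahash *ahash;
	} fallback;
};

struct sa_sha_req_ctx {
	struct ahash_request fallback_req;	/* keep last: trailing request ctx */
};

static int sa_sha_cra_init_alg(struct crypto_tfm *tfm, const char *alg_base)
{
	struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->fallback.ahash = crypto_alloc_ahash(alg_base, 0,
						 CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback.ahash))
		return PTR_ERR(ctx->fallback.ahash);

	/* Request context = our state plus the fallback's own request. */
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct sa_sha_req_ctx) +
				 crypto_ahash_reqsize(ctx->fallback.ahash));
	return 0;
}

static int sa_sha_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
	struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);

	/* Point the nested request at the software implementation
	 * (completion callback setup omitted here).
	 */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	return crypto_ahash_init(&rctx->fallback_req);
}
```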
|
| /Linux-v5.15/arch/s390/crypto/ |
| D | aes_s390.c |
      50   } fallback;  member
      58   struct crypto_skcipher *fallback;  member
      77   sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;  in setkey_fallback_cip()
      78   sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &  in setkey_fallback_cip()
      81   return crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);  in setkey_fallback_cip()
      110  crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);  in crypto_aes_encrypt()
      121  crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);  in crypto_aes_decrypt()
      133  sctx->fallback.cip = crypto_alloc_cipher(name, 0,  in fallback_init_cip()
      136  if (IS_ERR(sctx->fallback.cip)) {  in fallback_init_cip()
      139  return PTR_ERR(sctx->fallback.cip);  in fallback_init_cip()
      [all …]
|
| /Linux-v5.15/Documentation/driver-api/firmware/ |
| D | fallback-mechanisms.rst |
      5   A fallback mechanism is supported to allow to overcome failures to do a direct
      8   configuration options related to supporting the firmware fallback mechanism are:
      10  * CONFIG_FW_LOADER_USER_HELPER: enables building the firmware fallback
      12  CONFIG_FW_LOADER_USER_HELPER_FALLBACK is disabled, only the custom fallback
      15  enable the kobject uevent fallback mechanism on all firmware API calls
      18  fallback mechanism: if this kconfig option is enabled and your second
      20  informing the kernel that you have a custom fallback mechanism and it will
      28  the kobject uevent fallback mechanism will never take effect even
      31  Justifying the firmware fallback mechanism
      36  fallback mechanism:
      [all …]
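From the driver's point of view the sysfs fallback documented here is transparent: the driver calls request_firmware() as usual, and if direct filesystem loading fails and the fallback support is built in, userspace gets a chance to supply the blob before the call returns. A minimal caller for illustration; the firmware name and the surrounding probe function are hypothetical:

```c
/* Minimal request_firmware() caller; "vendor/example-fw.bin" is made up. */
#include <linux/firmware.h>
#include <linux/device.h>

static int example_load_firmware(struct device *dev)
{
	const struct firmware *fw;
	int err;

	/* May be satisfied directly from /lib/firmware, or via the sysfs
	 * fallback mechanism described above when that is enabled.
	 */
	err = request_firmware(&fw, "vendor/example-fw.bin", dev);
	if (err)
		return err;

	/* ... push fw->data (fw->size bytes) to the device ... */

	release_firmware(fw);
	return 0;
}
```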
|
| /Linux-v5.15/scripts/dtc/ |
| D | dtc.c |
      114  static const char *guess_type_by_name(const char *fname, const char *fallback)  in guess_type_by_name() argument
      120  return fallback;  in guess_type_by_name()
      129  return fallback;  in guess_type_by_name()
      132  static const char *guess_input_format(const char *fname, const char *fallback)  in guess_input_format() argument
      139  return fallback;  in guess_input_format()
      145  return fallback;  in guess_input_format()
      149  return fallback;  in guess_input_format()
      152  return fallback;  in guess_input_format()
      159  return guess_type_by_name(fname, fallback);  in guess_input_format()
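dtc's format guessing is the plainest form of the idiom: return the caller-supplied fallback whenever nothing matches. guess_type_by_name() looks only at the file extension; guess_input_format() additionally sniffs the file contents before handing the decision back to guess_type_by_name(). A hedged, self-contained sketch of the extension half; the exact set of extensions dtc recognizes may differ:

```c
/* Illustrative reconstruction of extension-based format guessing. */
#include <string.h>
#include <strings.h>

static const char *guess_type_by_name(const char *fname, const char *fallback)
{
	const char *s = strrchr(fname, '.');

	if (!s)
		return fallback;	/* no extension at all */
	if (!strcasecmp(s, ".dts"))
		return "dts";
	if (!strcasecmp(s, ".dtb"))
		return "dtb";
	/* Unknown extension: let the caller's default win. */
	return fallback;
}
```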
|
| /Linux-v5.15/drivers/base/firmware_loader/ |
| D | Kconfig |
      72  bool "Enable the firmware sysfs fallback mechanism"
      76  loading to the kernel through userspace as a fallback mechanism
      81  built-in. For details on how to work with the sysfs fallback mechanism
      82  refer to Documentation/driver-api/firmware/fallback-mechanisms.rst.
      87  the requested firmware a sysfs fallback loading facility is made
      90  this is known as the driver using the custom fallback mechanism.
      91  If the custom fallback mechanism is used userspace must always
      92  acknowledge failure to find firmware as the timeout for the fallback
      98  can no longer be relied upon as a fallback mechanism. Linux no longer
      99  relies on or uses a fallback mechanism in userspace. If you need to
      [all …]
|
| /Linux-v5.15/block/ |
| D | blk-mq-virtio.c |
      31  goto fallback;  in blk_mq_virtio_map_queues()
      36  goto fallback;  in blk_mq_virtio_map_queues()
      43  fallback:  in blk_mq_virtio_map_queues()
|
| D | blk-mq-rdma.c |
      33  goto fallback;  in blk_mq_rdma_map_queues()
      41  fallback:  in blk_mq_rdma_map_queues()
|
| D | blk-mq-pci.c |
      35  goto fallback;  in blk_mq_pci_map_queues()
      43  fallback:  in blk_mq_pci_map_queues()
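The three blk-mq helpers above share one shape: walk the hardware queues, ask the bus layer for each queue's IRQ affinity mask, and on any failure jump to a fallback label that falls back to the generic blk_mq_map_queues() spreading. A sketch modeled on the PCI variant, with warnings and edge cases trimmed; the function name is illustrative:

```c
/* Sketch of the goto-fallback queue-mapping pattern. */
#include <linux/blk-mq.h>
#include <linux/cpumask.h>
#include <linux/pci.h>

int example_pci_map_queues(struct blk_mq_queue_map *qmap,
			   struct pci_dev *pdev, int offset)
{
	unsigned int queue, cpu;

	for (queue = 0; queue < qmap->nr_queues; queue++) {
		const struct cpumask *mask;

		mask = pci_irq_get_affinity(pdev, queue + offset);
		if (!mask)
			goto fallback;	/* no affinity info for this vector */

		for_each_cpu(cpu, mask)
			qmap->mq_map[cpu] = qmap->queue_offset + queue;
	}
	return 0;

fallback:
	/* Let the core spread queues over CPUs without IRQ affinity hints. */
	return blk_mq_map_queues(qmap);
}
```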
|
| /Linux-v5.15/tools/testing/selftests/bpf/progs/ |
| D | bpf_dctcp.c |
      20  volatile const char fallback[TCP_CA_NAME_MAX];  variable
      65  if (!(tp->ecn_flags & TCP_ECN_OK) && fallback[0]) {  in BPF_PROG()
      68  (void *)fallback, sizeof(fallback));  in BPF_PROG()
      76  (void *)fallback, sizeof(fallback));  in BPF_PROG()
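In the dctcp selftest, fallback is a read-only array filled in by userspace with the name of a backup congestion-control algorithm; when the BPF congestion-control program decides it cannot operate (in the real test, when ECN was not negotiated), it switches the socket over with bpf_setsockopt(TCP_CONGESTION). A trimmed sketch of just that handover; the ECN check, struct_ops wiring and error counting are omitted, and the socket-option constants, normally supplied by the selftests' helper headers, are spelled out here as assumptions mirroring the UAPI values:

```c
// SPDX-License-Identifier: GPL-2.0
/* Trimmed sketch of the fallback switch in bpf_dctcp.c. */
#include "vmlinux.h"
#include <bpf/bpf_helpers.h>
#include <bpf/bpf_tracing.h>

#define SOL_TCP		6	/* assumed, mirrors IPPROTO_TCP */
#define TCP_CONGESTION	13	/* assumed, mirrors the UAPI value */
#define TCP_CA_NAME_MAX	16

/* Set by userspace before the skeleton is loaded. */
volatile const char fallback[TCP_CA_NAME_MAX];

SEC("struct_ops/dctcp_init")
void BPF_PROG(dctcp_init, struct sock *sk)
{
	if (fallback[0])
		/* Re-point this socket at the fallback congestion control. */
		bpf_setsockopt(sk, SOL_TCP, TCP_CONGESTION,
			       (void *)fallback, sizeof(fallback));
}

char _license[] SEC("license") = "GPL";
```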
|
| /Linux-v5.15/include/linux/ |
| D | bcm47xx_sprom.h |
      16  bool fallback);
      21  bool fallback)  in bcm47xx_fill_sprom() argument
|
| /Linux-v5.15/Documentation/block/ |
| D | inline-encryption.rst |
      27   To allow for testing, we also want a crypto API fallback when actual
      84   blk-mq changes, other block layer changes and blk-crypto-fallback
      93   We introduce ``block/blk-crypto-fallback.c``, which allows upper layers to remain
      97   en/decrypt the bio with the blk-crypto-fallback.
      102  concerned). ``blk-crypto-fallback`` sets the bounce bio's ``bi_end_io`` to an
      106  is saved and overwritten by ``blk-crypto-fallback`` to
      114  blk-crypto-fallback is used, the ciphertext written to disk (and hence the
      120  ``blk-crypto-fallback``. We will eventually reach a point in blk-mq when a
      154  blk-crypto-fallback, if hardware support isn't available for the desired
      160  (either by real inline encryption hardware, or by the blk-crypto-fallback).
      [all …]
|
| /Linux-v5.15/drivers/gpu/drm/ |
| D | drm_edid_load.c |
      269  char *edidname, *last, *colon, *fwstr, *edidstr, *fallback = NULL;  in drm_load_edid_firmware() local
      298  fallback = edidname;  in drm_load_edid_firmware()
      302  if (!fallback) {  in drm_load_edid_firmware()
      306  edidname = fallback;  in drm_load_edid_firmware()
|
| /Linux-v5.15/kernel/bpf/preload/iterators/ |
| D | iterators.bpf.c |
      59  static const char *get_name(struct btf *btf, long btf_id, const char *fallback)  in get_name() argument
      66  return fallback;  in get_name()
      72  return fallback;  in get_name()
|
| /Linux-v5.15/scripts/atomic/ |
| D | gen-atomics.sh | 13 gen-atomic-fallback.sh linux/atomic/atomic-arch-fallback.h
|
| /Linux-v5.15/drivers/crypto/ccp/ |
| D | ccp-crypto-aes-xts.c |
      111  unsigned int fallback = 0;  in ccp_aes_xts_crypt() local
      140  fallback = 1;  in ccp_aes_xts_crypt()
      143  fallback = 1;  in ccp_aes_xts_crypt()
      146  fallback = 1;  in ccp_aes_xts_crypt()
      147  if (fallback) {  in ccp_aes_xts_crypt()
|
| /Linux-v5.15/mm/kfence/ |
| D | report.c |
      53  int skipnr, fallback = 0;  in get_stack_skipnr() local
      83  fallback = skipnr + 1;  in get_stack_skipnr()
      93  if (fallback < num_entries)  in get_stack_skipnr()
      94  return fallback;  in get_stack_skipnr()
|
| /Linux-v5.15/drivers/crypto/qce/ |
| D | skcipher.c |
      205  ret = crypto_skcipher_setkey(ctx->fallback, key, keylen);  in qce_skcipher_setkey()
      295  skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in qce_skcipher_crypt()
      332  ctx->fallback = crypto_alloc_skcipher(crypto_tfm_alg_name(&tfm->base),  in qce_skcipher_init_fallback()
      334  if (IS_ERR(ctx->fallback))  in qce_skcipher_init_fallback()
      335  return PTR_ERR(ctx->fallback);  in qce_skcipher_init_fallback()
      338  crypto_skcipher_reqsize(ctx->fallback));  in qce_skcipher_init_fallback()
      346  crypto_free_skcipher(ctx->fallback);  in qce_skcipher_exit()
|