| /Linux-v5.10/drivers/crypto/vmx/ |
| D | aes.c |
    22  struct crypto_cipher *fallback;  member
    30  struct crypto_cipher *fallback;  in p8_aes_init() local
    33  fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);  in p8_aes_init()
    34  if (IS_ERR(fallback)) {  in p8_aes_init()
    37  alg, PTR_ERR(fallback));  in p8_aes_init()
    38  return PTR_ERR(fallback);  in p8_aes_init()
    41  crypto_cipher_set_flags(fallback,  in p8_aes_init()
    45  ctx->fallback = fallback;  in p8_aes_init()
    54  if (ctx->fallback) {  in p8_aes_exit()
    55  crypto_free_cipher(ctx->fallback);  in p8_aes_exit()
    [all …]
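Taken together, these hits show the standard crypto-driver fallback idiom: at tfm init the driver allocates a software implementation of the same algorithm with CRYPTO_ALG_NEED_FALLBACK, stores it in the context, and frees it on exit. A minimal sketch of that init/exit pair, assuming a context layout like the one hinted at by the `member` hit (struct name and message text are illustrative, not the exact p8_aes code):

```c
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/printk.h>

struct p8_aes_ctx_sketch {              /* illustrative context layout */
	struct crypto_cipher *fallback;
};

static int p8_aes_init_sketch(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct p8_aes_ctx_sketch *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *fallback;

	/* Ask the crypto API for any other "aes" implementation. */
	fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate fallback for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	/* Mirror the caller's request flags onto the fallback tfm. */
	crypto_cipher_set_flags(fallback,
				crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK);
	ctx->fallback = fallback;
	return 0;
}

static void p8_aes_exit_sketch(struct crypto_tfm *tfm)
{
	struct p8_aes_ctx_sketch *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_cipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}
```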
|
| D | aes_cbc.c |
    19  struct crypto_skcipher *fallback;  member
    27  struct crypto_skcipher *fallback;  in p8_aes_cbc_init() local
    29  fallback = crypto_alloc_skcipher("cbc(aes)", 0,  in p8_aes_cbc_init()
    32  if (IS_ERR(fallback)) {  in p8_aes_cbc_init()
    34  PTR_ERR(fallback));  in p8_aes_cbc_init()
    35  return PTR_ERR(fallback);  in p8_aes_cbc_init()
    39  crypto_skcipher_reqsize(fallback));  in p8_aes_cbc_init()
    40  ctx->fallback = fallback;  in p8_aes_cbc_init()
    48  crypto_free_skcipher(ctx->fallback);  in p8_aes_cbc_exit()
    66  ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);  in p8_aes_cbc_setkey()
    [all …]
|
| D | aes_ctr.c |
    19  struct crypto_skcipher *fallback;  member
    26  struct crypto_skcipher *fallback;  in p8_aes_ctr_init() local
    28  fallback = crypto_alloc_skcipher("ctr(aes)", 0,  in p8_aes_ctr_init()
    31  if (IS_ERR(fallback)) {  in p8_aes_ctr_init()
    33  PTR_ERR(fallback));  in p8_aes_ctr_init()
    34  return PTR_ERR(fallback);  in p8_aes_ctr_init()
    38  crypto_skcipher_reqsize(fallback));  in p8_aes_ctr_init()
    39  ctx->fallback = fallback;  in p8_aes_ctr_init()
    47  crypto_free_skcipher(ctx->fallback);  in p8_aes_ctr_exit()
    64  ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);  in p8_aes_ctr_setkey()
    [all …]
|
| D | aes_xts.c |
    20  struct crypto_skcipher *fallback;  member
    29  struct crypto_skcipher *fallback;  in p8_aes_xts_init() local
    31  fallback = crypto_alloc_skcipher("xts(aes)", 0,  in p8_aes_xts_init()
    34  if (IS_ERR(fallback)) {  in p8_aes_xts_init()
    36  PTR_ERR(fallback));  in p8_aes_xts_init()
    37  return PTR_ERR(fallback);  in p8_aes_xts_init()
    41  crypto_skcipher_reqsize(fallback));  in p8_aes_xts_init()
    42  ctx->fallback = fallback;  in p8_aes_xts_init()
    50  crypto_free_skcipher(ctx->fallback);  in p8_aes_xts_exit()
    73  ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);  in p8_aes_xts_setkey()
    [all …]
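The cbc/ctr/xts files repeat the same idiom with the skcipher API; the extra detail visible in the crypto_skcipher_reqsize() hits is that the outer tfm's request size must be enlarged so a fallback sub-request fits inside each request. A hedged sketch of the cbc case (context type and error message are illustrative):

```c
#include <crypto/skcipher.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/printk.h>

struct p8_skcipher_ctx_sketch {         /* illustrative context layout */
	struct crypto_skcipher *fallback;
};

static int p8_aes_cbc_init_sketch(struct crypto_skcipher *tfm)
{
	struct p8_skcipher_ctx_sketch *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

	fallback = crypto_alloc_skcipher("cbc(aes)", 0,
					 CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate cbc(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	/* Reserve room in every request for a fallback sub-request. */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(fallback));
	ctx->fallback = fallback;
	return 0;
}

static void p8_aes_cbc_exit_sketch(struct crypto_skcipher *tfm)
{
	struct p8_skcipher_ctx_sketch *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
	ctx->fallback = NULL;
}
```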
|
| /Linux-v5.10/drivers/firmware/broadcom/ |
| D | bcm47xx_sprom.c |
    50  const char *name, char *buf, int len, bool fallback)  in get_nvram_var() argument
    58  if (fallback && err == -ENOENT && prefix) {  in get_nvram_var()
    68  type *val, type allset, bool fallback) \
    75  fallback); \
    97  u16 *val_lo, u16 *val_hi, bool fallback)  in NVRAM_READ_VAL()
    103  err = get_nvram_var(prefix, NULL, name, buf, sizeof(buf), fallback);  in NVRAM_READ_VAL()
    118  bool fallback)  in nvram_read_leddc() argument
    124  err = get_nvram_var(prefix, NULL, name, buf, sizeof(buf), fallback);  in nvram_read_leddc()
    142  u8 val[6], bool fallback)  in nvram_read_macaddr() argument
    147  err = get_nvram_var(prefix, NULL, name, buf, sizeof(buf), fallback);  in nvram_read_macaddr()
    [all …]
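The get_nvram_var() hits outline the SPROM lookup fallback: try the prefixed variable name first, and only if that fails with -ENOENT (and a prefix was in use) retry the bare name. A simplified, userspace-style sketch of that logic, with nvram_getenv() standing in for the real NVRAM accessor (the helper name and buffer handling are assumptions):

```c
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>

/* Stand-in for the real NVRAM accessor; assumed to return 0 or -ENOENT. */
int nvram_getenv(const char *name, char *buf, size_t len);

/* Look up "<prefix><name>" first; optionally fall back to plain "<name>". */
static int get_nvram_var_sketch(const char *prefix, const char *name,
				char *buf, size_t len, bool fallback)
{
	char key[64];
	int err;

	snprintf(key, sizeof(key), "%s%s", prefix ? prefix : "", name);

	err = nvram_getenv(key, buf, len);
	if (fallback && err == -ENOENT && prefix)
		err = nvram_getenv(name, buf, len);

	return err;
}
```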
|
| /Linux-v5.10/drivers/crypto/ |
| D | geode-aes.c |
    128  tctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;  in geode_setkey_cip()
    129  tctx->fallback.cip->base.crt_flags |=  in geode_setkey_cip()
    132  return crypto_cipher_setkey(tctx->fallback.cip, key, len);  in geode_setkey_cip()
    154  crypto_skcipher_clear_flags(tctx->fallback.skcipher,  in geode_setkey_skcipher()
    156  crypto_skcipher_set_flags(tctx->fallback.skcipher,  in geode_setkey_skcipher()
    159  return crypto_skcipher_setkey(tctx->fallback.skcipher, key, len);  in geode_setkey_skcipher()
    168  crypto_cipher_encrypt_one(tctx->fallback.cip, out, in);  in geode_encrypt()
    183  crypto_cipher_decrypt_one(tctx->fallback.cip, out, in);  in geode_decrypt()
    196  tctx->fallback.cip = crypto_alloc_cipher(name, 0,  in fallback_init_cip()
    199  if (IS_ERR(tctx->fallback.cip)) {  in fallback_init_cip()
    [all …]
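geode-aes.c shows the setkey half of the fallback contract: before forwarding the key, the driver clears CRYPTO_TFM_REQ_MASK on the fallback cipher and copies the caller's request flags across, so the fallback honours the same request constraints. Roughly (the context type is illustrative, not the driver's real struct):

```c
#include <linux/crypto.h>

struct geode_tctx_sketch {              /* illustrative context layout */
	struct crypto_cipher *fallback_cip;
};

static int setkey_via_fallback_sketch(struct crypto_tfm *tfm, const u8 *key,
				      unsigned int len)
{
	struct geode_tctx_sketch *tctx = crypto_tfm_ctx(tfm);

	/* Forget any stale request flags on the fallback... */
	tctx->fallback_cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	/* ...and copy over whatever the caller asked of the outer tfm. */
	tctx->fallback_cip->base.crt_flags |=
		(tfm->crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_cipher_setkey(tctx->fallback_cip, key, len);
}
```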
|
| D | atmel-ecc.c |
    43  struct crypto_kpp *fallback;  member
    111  return crypto_kpp_set_secret(ctx->fallback, buf, len);  in atmel_ecdh_set_secret()
    158  kpp_request_set_tfm(req, ctx->fallback);  in atmel_ecdh_generate_public_key()
    187  kpp_request_set_tfm(req, ctx->fallback);  in atmel_ecdh_compute_shared_secret()
    263  struct crypto_kpp *fallback;  in atmel_ecdh_init_tfm() local
    272  fallback = crypto_alloc_kpp(alg, 0, CRYPTO_ALG_NEED_FALLBACK);  in atmel_ecdh_init_tfm()
    273  if (IS_ERR(fallback)) {  in atmel_ecdh_init_tfm()
    275  alg, PTR_ERR(fallback));  in atmel_ecdh_init_tfm()
    276  return PTR_ERR(fallback);  in atmel_ecdh_init_tfm()
    279  crypto_kpp_set_flags(fallback, crypto_kpp_get_flags(tfm));  in atmel_ecdh_init_tfm()
    [all …]
|
| D | padlock-sha.c |
    24  struct shash_desc fallback;  member
    28  struct crypto_shash *fallback;  member
    36  dctx->fallback.tfm = ctx->fallback;  in padlock_sha_init()
    37  return crypto_shash_init(&dctx->fallback);  in padlock_sha_init()
    45  return crypto_shash_update(&dctx->fallback, data, length);  in padlock_sha_update()
    52  return crypto_shash_export(&dctx->fallback, out);  in padlock_sha_export()
    60  dctx->fallback.tfm = ctx->fallback;  in padlock_sha_import()
    61  return crypto_shash_import(&dctx->fallback, in);  in padlock_sha_import()
    86  err = crypto_shash_export(&dctx->fallback, &state);  in padlock_sha1_finup()
    91  return crypto_shash_finup(&dctx->fallback, in, count, out);  in padlock_sha1_finup()
    [all …]
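padlock-sha.c keeps a per-request shash_desc whose tfm points at the software hash allocated at init time, so init/update/export simply forward to that descriptor. A sketch of the init and update paths under that assumption (struct names are illustrative):

```c
#include <crypto/hash.h>
#include <crypto/internal/hash.h>

struct padlock_sha_desc_sketch {        /* per-request state, illustrative */
	struct shash_desc fallback;
};

struct padlock_sha_ctx_sketch {         /* per-tfm state, illustrative */
	struct crypto_shash *fallback;
};

static int padlock_sha_init_sketch(struct shash_desc *desc)
{
	struct padlock_sha_desc_sketch *dctx = shash_desc_ctx(desc);
	struct padlock_sha_ctx_sketch *ctx = crypto_shash_ctx(desc->tfm);

	/* Point the per-request descriptor at the software hash. */
	dctx->fallback.tfm = ctx->fallback;
	return crypto_shash_init(&dctx->fallback);
}

static int padlock_sha_update_sketch(struct shash_desc *desc,
				     const u8 *data, unsigned int length)
{
	struct padlock_sha_desc_sketch *dctx = shash_desc_ctx(desc);

	return crypto_shash_update(&dctx->fallback, data, length);
}
```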
|
| D | sa2ul.c |
    822  crypto_free_sync_skcipher(ctx->fallback.skcipher);  in sa_cipher_cra_exit()
    844  ctx->fallback.skcipher =  in sa_cipher_cra_init()
    847  if (IS_ERR(ctx->fallback.skcipher)) {  in sa_cipher_cra_init()
    849  return PTR_ERR(ctx->fallback.skcipher);  in sa_cipher_cra_init()
    877  crypto_sync_skcipher_clear_flags(ctx->fallback.skcipher,  in sa_cipher_setkey()
    879  crypto_sync_skcipher_set_flags(ctx->fallback.skcipher,  in sa_cipher_setkey()
    882  ret = crypto_sync_skcipher_setkey(ctx->fallback.skcipher, key, keylen);  in sa_cipher_setkey()
    1285  SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback.skcipher);  in sa_cipher_run()
    1287  skcipher_request_set_sync_tfm(subreq, ctx->fallback.skcipher);  in sa_cipher_run()
    1393  ahash_request_set_tfm(subreq, ctx->fallback.ahash);  in sa_sha_run()
    [all …]
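sa2ul.c demonstrates the synchronous-fallback dispatch path: when the hardware path cannot take a request, an on-stack sub-request is built for the pre-allocated crypto_sync_skcipher and run inline. A sketch of that run path (the function name and boolean parameter are illustrative, not the driver's real interface):

```c
#include <crypto/skcipher.h>
#include <linux/types.h>

/* Run a request through a pre-allocated sync skcipher fallback using an
 * on-stack sub-request. */
static int run_via_sync_fallback_sketch(struct skcipher_request *req, bool enc,
					struct crypto_sync_skcipher *fallback)
{
	int ret;

	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, fallback);

	skcipher_request_set_sync_tfm(subreq, fallback);
	skcipher_request_set_callback(subreq, req->base.flags, NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	ret = enc ? crypto_skcipher_encrypt(subreq) :
		    crypto_skcipher_decrypt(subreq);

	skcipher_request_zero(subreq);
	return ret;
}
```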
|
| D | img-hash.c |
    113  struct crypto_ahash *fallback;  member
    488  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in img_hash_init()
    552  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in img_hash_update()
    567  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in img_hash_final()
    581  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in img_hash_finup()
    597  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in img_hash_import()
    610  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in img_hash_export()
    678  ctx->fallback = crypto_alloc_ahash(alg_name, 0,  in img_hash_cra_init()
    680  if (IS_ERR(ctx->fallback)) {  in img_hash_cra_init()
    682  err = PTR_ERR(ctx->fallback);  in img_hash_cra_init()
    [all …]
|
| /Linux-v5.10/scripts/dtc/ |
| D | dtc.c |
    114  static const char *guess_type_by_name(const char *fname, const char *fallback)  in guess_type_by_name() argument
    120  return fallback;  in guess_type_by_name()
    127  return fallback;  in guess_type_by_name()
    130  static const char *guess_input_format(const char *fname, const char *fallback)  in guess_input_format() argument
    137  return fallback;  in guess_input_format()
    143  return fallback;  in guess_input_format()
    147  return fallback;  in guess_input_format()
    150  return fallback;  in guess_input_format()
    157  return guess_type_by_name(fname, fallback);  in guess_input_format()
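dtc's guess helpers key off the filename suffix (and, in guess_input_format(), the first bytes of the file), returning the caller-supplied fallback string whenever nothing matches. A cut-down sketch of the suffix half, with an illustrative extension set rather than dtc's exact list:

```c
#include <string.h>
#include <strings.h>

/* Guess an input type from the file extension; return the caller's
 * fallback when there is no extension or it is unrecognised. */
static const char *guess_type_by_name_sketch(const char *fname,
					     const char *fallback)
{
	const char *s = strrchr(fname, '.');

	if (!s)
		return fallback;
	if (!strcasecmp(s, ".dts"))
		return "dts";
	if (!strcasecmp(s, ".dtb"))
		return "dtb";
	return fallback;
}
```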
|
| /Linux-v5.10/arch/s390/crypto/ |
| D | aes_s390.c |
    49  } fallback;  member
    57  struct crypto_skcipher *fallback;  member
    76  sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;  in setkey_fallback_cip()
    77  sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &  in setkey_fallback_cip()
    80  return crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);  in setkey_fallback_cip()
    109  crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);  in crypto_aes_encrypt()
    120  crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);  in crypto_aes_decrypt()
    132  sctx->fallback.cip = crypto_alloc_cipher(name, 0,  in fallback_init_cip()
    135  if (IS_ERR(sctx->fallback.cip)) {  in fallback_init_cip()
    138  return PTR_ERR(sctx->fallback.cip);  in fallback_init_cip()
    [all …]
|
| /Linux-v5.10/Documentation/driver-api/firmware/ |
| D | fallback-mechanisms.rst |
    5   A fallback mechanism is supported to allow to overcome failures to do a direct
    8   configuration options related to supporting the firmware fallback mechanism are:
    10  * CONFIG_FW_LOADER_USER_HELPER: enables building the firmware fallback
    12  CONFIG_FW_LOADER_USER_HELPER_FALLBACK is disabled, only the custom fallback
    15  enable the kobject uevent fallback mechanism on all firmware API calls
    18  fallback mechanism: if this kconfig option is enabled and your second
    20  informing the kernel that you have a custom fallback mechanism and it will
    28  the kobject uevent fallback mechanism will never take effect even
    31  Justifying the firmware fallback mechanism
    36  fallback mechanism:
    [all …]
|
| /Linux-v5.10/scripts/atomic/ |
| D | gen-atomics.sh |
    13  gen-atomic-fallback.sh linux/atomic-arch-fallback.h arch_
    14  gen-atomic-fallback.sh linux/atomic-fallback.h
|
| D | check-atomics.sh |
    19  linux/atomic-arch-fallback.h
    20  linux/atomic-fallback.h
|
| /Linux-v5.10/drivers/base/firmware_loader/ |
| D | Kconfig |
    72  bool "Enable the firmware sysfs fallback mechanism"
    76  loading to the kernel through userspace as a fallback mechanism
    81  built-in. For details on how to work with the sysfs fallback mechanism
    82  refer to Documentation/driver-api/firmware/fallback-mechanisms.rst.
    87  the requested firmware a sysfs fallback loading facility is made
    90  this is known as the driver using the custom fallback mechanism.
    91  If the custom fallback mechanism is used userspace must always
    92  acknowledge failure to find firmware as the timeout for the fallback
    98  can no longer be relied upon as a fallback mechanism. Linux no longer
    99  relies on or uses a fallback mechanism in userspace. If you need to
    [all …]
|
| /Linux-v5.10/block/ |
| D | blk-mq-virtio.c |
    31  goto fallback;  in blk_mq_virtio_map_queues()
    36  goto fallback;  in blk_mq_virtio_map_queues()
    43  fallback:  in blk_mq_virtio_map_queues()
|
| D | blk-mq-rdma.c |
    33  goto fallback;  in blk_mq_rdma_map_queues()
    41  fallback:  in blk_mq_rdma_map_queues()
|
| D | blk-mq-pci.c |
    35  goto fallback;  in blk_mq_pci_map_queues()
    43  fallback:  in blk_mq_pci_map_queues()
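The three blk-mq map helpers above share one idiom: derive the queue-to-CPU mapping from per-vector IRQ affinity and `goto fallback` to the generic blk_mq_map_queues() spreading when any lookup fails. In outline (device_irq_affinity() is an illustrative stand-in for the bus-specific query):

```c
#include <linux/blk-mq.h>
#include <linux/cpumask.h>

/* Illustrative stand-in for the bus-specific affinity query
 * (virtio's get_vq_affinity, ib_get_vector_affinity, pci_irq_get_affinity). */
const struct cpumask *device_irq_affinity(void *dev, int vec);

static int map_queues_sketch(struct blk_mq_queue_map *qmap, void *dev,
			     int first_vec)
{
	const struct cpumask *mask;
	unsigned int queue, cpu;

	for (queue = 0; queue < qmap->nr_queues; queue++) {
		mask = device_irq_affinity(dev, first_vec + queue);
		if (!mask)
			goto fallback;  /* no affinity information available */

		for_each_cpu(cpu, mask)
			qmap->mq_map[cpu] = qmap->queue_offset + queue;
	}

	return 0;

fallback:
	/* Spread queues generically when affinity can't be used. */
	return blk_mq_map_queues(qmap);
}
```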
|
| /Linux-v5.10/include/linux/ |
| D | bcm47xx_sprom.h |
    16  bool fallback);
    21  bool fallback)  in bcm47xx_fill_sprom() argument
|
| /Linux-v5.10/drivers/gpu/drm/ |
| D | drm_edid_load.c |
    269  char *edidname, *last, *colon, *fwstr, *edidstr, *fallback = NULL;  in drm_load_edid_firmware() local
    298  fallback = edidname;  in drm_load_edid_firmware()
    302  if (!fallback) {  in drm_load_edid_firmware()
    306  edidname = fallback;  in drm_load_edid_firmware()
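drm_load_edid_firmware() parses the comma-separated edid_firmware= string, preferring a "connector:file" entry that matches the current connector and remembering the first unqualified entry as the fallback. A rough reconstruction of that selection logic (pick_edid_name() is a hypothetical helper and the exact matching rules are assumptions):

```c
#include <string.h>

/* Walk a comma-separated list of "connector:file" / "file" entries,
 * prefer an exact connector match, and keep the first unqualified
 * entry as the fallback. */
static const char *pick_edid_name(char *edidstr, const char *connector_name)
{
	char *edidname, *colon, *fallback = NULL, *last = edidstr;
	size_t conn_len = strlen(connector_name);

	while ((edidname = strsep(&last, ",")) != NULL) {
		colon = strchr(edidname, ':');
		if (colon) {
			/* "connector:file": use it only on an exact match. */
			if (conn_len == (size_t)(colon - edidname) &&
			    !strncmp(connector_name, edidname, conn_len))
				return colon + 1;
			continue;
		}
		/* Unqualified entry: the first one becomes the fallback. */
		if (!fallback)
			fallback = edidname;
	}

	return fallback;
}
```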
|
| /Linux-v5.10/Documentation/block/ |
| D | inline-encryption.rst |
    27  To allow for testing, we also want a crypto API fallback when actual
    84  blk-mq changes, other block layer changes and blk-crypto-fallback
    93  We introduce ``block/blk-crypto-fallback.c``, which allows upper layers to remain
    97  en/decrypt the bio with the blk-crypto-fallback.
    102  concerned). ``blk-crypto-fallback`` sets the bounce bio's ``bi_end_io`` to an
    106  is saved and overwritten by ``blk-crypto-fallback`` to
    114  blk-crypto-fallback is used, the ciphertext written to disk (and hence the
    120  ``blk-crypto-fallback``. We will eventually reach a point in blk-mq when a
    154  blk-crypto-fallback, if hardware support isn't available for the desired
    160  (either by real inline encryption hardware, or by the blk-crypto-fallback).
    [all …]
|
| /Linux-v5.10/kernel/bpf/preload/iterators/ |
| D | iterators.bpf.c |
    60  static const char *get_name(struct btf *btf, long btf_id, const char *fallback)  in get_name() argument
    67  return fallback;  in get_name()
    73  return fallback;  in get_name()
|
| /Linux-v5.10/drivers/crypto/ccp/ |
| D | ccp-crypto-aes-xts.c |
    111  unsigned int fallback = 0;  in ccp_aes_xts_crypt() local
    140  fallback = 1;  in ccp_aes_xts_crypt()
    143  fallback = 1;  in ccp_aes_xts_crypt()
    146  fallback = 1;  in ccp_aes_xts_crypt()
    147  if (fallback) {  in ccp_aes_xts_crypt()
|
| /Linux-v5.10/drivers/crypto/qce/ |
| D | skcipher.c |
    182  ret = crypto_skcipher_setkey(ctx->fallback, key, keylen);  in qce_skcipher_setkey()
    239  skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in qce_skcipher_crypt()
    276  ctx->fallback = crypto_alloc_skcipher(crypto_tfm_alg_name(&tfm->base),  in qce_skcipher_init_fallback()
    278  if (IS_ERR(ctx->fallback))  in qce_skcipher_init_fallback()
    279  return PTR_ERR(ctx->fallback);  in qce_skcipher_init_fallback()
    282  crypto_skcipher_reqsize(ctx->fallback));  in qce_skcipher_init_fallback()
    290  crypto_free_skcipher(ctx->fallback);  in qce_skcipher_exit()
|