
Searched refs:ecb (Results 1 – 25 of 28) sorted by relevance


/Linux-v5.4/drivers/scsi/
aha1740.c
77 struct ecb ecb[AHA1740_ECBS]; member
88 static inline struct ecb *ecb_dma_to_cpu (struct Scsi_Host *host, in ecb_dma_to_cpu()
96 return (struct ecb *)(((char *) hdata->ecb) + (unsigned int) offset); in ecb_dma_to_cpu()
104 offset = (char *) cpu - (char *) hdata->ecb; in ecb_cpu_to_dma()
213 struct ecb *ecbptr; in aha1740_intr_handle()
279 memset(ecbptr,0,sizeof(struct ecb)); in aha1740_intr_handle()
353 if (!host->ecb[ecbno].cmdw) in aha1740_queuecommand_lck()
360 if (host->ecb[ecbno].cmdw) in aha1740_queuecommand_lck()
363 host->ecb[ecbno].cmdw = AHA1740CMD_INIT; /* SCSI Initiator Command in aha1740_queuecommand_lck()
373 host->ecb[ecbno].cdblen = SCpnt->cmd_len; /* SCSI Command in aha1740_queuecommand_lck()
aha1740.h
105 struct ecb { /* Enhanced Control Block 6.1 */ struct
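
The aha1740 hits above are the SCSI driver's table of Enhanced Control Blocks and the two helpers that translate an ECB between its bus (DMA) address and its slot in hdata->ecb[]. A minimal self-contained sketch of that translation follows; the ecb_dma_addr field and the trimmed struct layout are assumptions added for illustration, only hdata->ecb appears in the results above.

/*
 * Illustrative sketch (not the driver itself): translating between the
 * DMA-visible address of an ECB and its CPU-side slot in hdata->ecb[],
 * mirroring the ecb_dma_to_cpu()/ecb_cpu_to_dma() hits above.
 * "ecb_dma_addr" is assumed here; only hdata->ecb appears in the results.
 */
#include <stddef.h>
#include <stdint.h>

#define AHA1740_ECBS 32                  /* table size, for illustration */

struct ecb { uint16_t cmdw; /* ... */ }; /* trimmed Enhanced Control Block */

struct hostdata {
	struct ecb ecb[AHA1740_ECBS];    /* CPU-side ECB table (line 77 hit) */
	uintptr_t ecb_dma_addr;          /* assumed: bus address of ecb[0] */
};

/* bus address -> CPU pointer: apply the same offset to the CPU-side base */
static struct ecb *ecb_dma_to_cpu(struct hostdata *hdata, uintptr_t dma)
{
	ptrdiff_t offset = dma - hdata->ecb_dma_addr;

	return (struct ecb *)((char *)hdata->ecb + offset);
}

/* CPU pointer -> bus address: offset within the array added to the DMA base */
static uintptr_t ecb_cpu_to_dma(struct hostdata *hdata, struct ecb *cpu)
{
	ptrdiff_t offset = (char *)cpu - (char *)hdata->ecb;

	return hdata->ecb_dma_addr + offset;
}
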
/Linux-v5.4/arch/x86/crypto/
camellia_aesni_avx2_glue.c
43 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
46 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
49 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
52 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
97 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
100 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
103 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
106 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
twofish_glue_3way.c
97 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
100 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
110 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_ctr_3way) }
113 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_ctr) }
123 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
126 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
serpent_avx2_glue.c
47 .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_16way) }
50 .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
53 .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
95 .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_16way) }
98 .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
101 .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
twofish_avx_glue.c
96 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_enc_8way) }
99 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
102 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
141 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_dec_8way) }
144 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
147 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
camellia_aesni_avx_glue.c
65 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
68 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
71 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
110 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
113 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
116 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
serpent_sse2_glue.c
82 .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_enc_blk_xway) }
85 .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
108 .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_dec_blk_xway) }
111 .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
cast6_avx_glue.c
73 .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
76 .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
112 .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
115 .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
serpent_avx_glue.c
105 .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
108 .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
144 .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
147 .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
camellia_glue.c
1318 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
1321 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
1344 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
1347 .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
glue_helper.c
49 gctx->funcs[i].fn_u.ecb(ctx, dst, src); in glue_ecb_req_128bit()
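
All of the .fn_u = { .ecb = ... } hits above fill per-cipher dispatch tables that pair a block count with an ECB helper, widest variant first, and glue_ecb_req_128bit() (the glue_helper.c hit) walks such a table to pick the widest helper that still fits the remaining data. A self-contained sketch of that walk, with illustrative type and field names standing in for the kernel's common_glue_ctx; only the fn_u.ecb call shape is taken from the results above.

/*
 * Illustrative dispatch walk: the table lists ECB helpers from widest to
 * narrowest, and the last entry is expected to handle a single block so
 * the loop always makes progress.
 */
#include <stddef.h>

typedef void (*ecb_func_t)(const void *ctx, unsigned char *dst,
			   const unsigned char *src);

struct glue_func_entry {
	unsigned int num_blocks;          /* blocks processed per call */
	union { ecb_func_t ecb; } fn_u;   /* mirrors "common_glue_func_t ecb" */
};

struct glue_ctx {
	unsigned int num_funcs;
	struct glue_func_entry funcs[4];
};

static void ecb_walk(const struct glue_ctx *gctx, const void *ctx,
		     unsigned char *dst, const unsigned char *src,
		     size_t nblocks, size_t block_size)
{
	while (nblocks) {
		for (unsigned int i = 0; i < gctx->num_funcs; i++) {
			if (nblocks < gctx->funcs[i].num_blocks)
				continue;   /* too few blocks left for this width */

			gctx->funcs[i].fn_u.ecb(ctx, dst, src);
			dst += gctx->funcs[i].num_blocks * block_size;
			src += gctx->funcs[i].num_blocks * block_size;
			nblocks -= gctx->funcs[i].num_blocks;
			break;
		}
	}
}
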
/Linux-v5.4/arch/s390/kvm/
vsie.c
444 const bool wants_tx = READ_ONCE(scb_o->ecb) & ECB_TE; in shadow_scb()
445 bool had_tx = scb_s->ecb & ECB_TE; in shadow_scb()
452 scb_s->ecb = 0; in shadow_scb()
509 scb_s->ecb |= scb_o->ecb & ECB_HOSTPROTINT; in shadow_scb()
515 scb_s->ecb |= ECB_TE; in shadow_scb()
533 scb_s->ecb |= scb_o->ecb & ECB_GS; in shadow_scb()
622 if (!rc && (scb_s->ecb & ECB_TE)) in map_prefix()
751 if (gpa && (scb_s->ecb & ECB_TE)) { in pin_blocks()
801 if (((scb_s->ecb & ECB_GS) && !(scb_s->ecd & ECD_HOSTREGMGMT)) || in pin_blocks()
kvm-s390.h
23 #define IS_TE_ENABLED(vcpu) ((vcpu->arch.sie_block->ecb & ECB_TE))
kvm-s390.c
2985 vcpu->arch.sie_block->ecb |= ECB_HOSTPROTINT; in kvm_arch_vcpu_setup()
2987 vcpu->arch.sie_block->ecb |= ECB_SRSI; in kvm_arch_vcpu_setup()
2989 vcpu->arch.sie_block->ecb |= ECB_TE; in kvm_arch_vcpu_setup()
3916 vcpu->arch.sie_block->ecb |= ECB_GS; in sync_regs()
priv.c
67 vcpu->arch.sie_block->ecb |= ECB_GS; in handle_gs()
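
In the s390 KVM hits, ecb is the feature-bit byte of the SIE control block: kvm-s390.c and priv.c switch bits such as ECB_TE and ECB_GS on for a vCPU, and vsie.c only copies a guest-requested bit into the shadow block when the host actually backs the facility. A self-contained sketch of that filtering pattern follows; the bit values and the host_supports() check are placeholders, not the kernel's definitions.

#include <stdint.h>
#include <stdbool.h>

#define ECB_TE 0x10   /* placeholder bit values, for illustration only */
#define ECB_GS 0x40

struct scb { uint8_t ecb; };

/* stand-in for the host facility check; always "yes" in this sketch */
static bool host_supports(unsigned int facility)
{
	(void)facility;
	return true;
}

/* propagate only the ecb bits the guest asked for AND the host can back */
static void shadow_ecb(struct scb *scb_s, const struct scb *scb_o)
{
	scb_s->ecb = 0;
	if ((scb_o->ecb & ECB_TE) && host_supports(73))  /* transactional exec. */
		scb_s->ecb |= ECB_TE;
	if ((scb_o->ecb & ECB_GS) && host_supports(133)) /* guarded storage */
		scb_s->ecb |= ECB_GS;
}
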
/Linux-v5.4/drivers/crypto/nx/
Makefile
6 nx-aes-ecb.o \
/Linux-v5.4/drivers/crypto/
mxs-dcp.c
106 unsigned int ecb:1; member
250 if (rctx->ecb) in mxs_dcp_run_aes()
306 if (!rctx->ecb) { in mxs_dcp_aes_block_crypt()
376 if (!rctx->ecb) { in mxs_dcp_aes_block_crypt()
447 static int mxs_dcp_aes_enqueue(struct ablkcipher_request *req, int enc, int ecb) in mxs_dcp_aes_enqueue() argument
459 rctx->ecb = ecb; in mxs_dcp_aes_enqueue()
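
The mxs-dcp hits show another common use of the name: a one-bit ecb flag in the AES request context, set when the request is enqueued and checked later so the ECB path can skip all IV handling. A rough, self-contained sketch of that flag pattern with illustrative structures standing in for the driver's request types:

#include <string.h>

struct aes_req_ctx {
	unsigned int enc:1;  /* encrypt vs. decrypt */
	unsigned int ecb:1;  /* ECB: no IV/chaining (line 106 hit) */
};

struct aes_req {
	struct aes_req_ctx rctx;
	unsigned char iv[16];
};

/* enqueue records the mode, mirroring "rctx->ecb = ecb;" (line 459 hit) */
static void aes_enqueue(struct aes_req *req, int enc, int ecb)
{
	req->rctx.enc = enc;
	req->rctx.ecb = ecb;
}

/* the crypt path consults the flag and only touches the IV for non-ECB modes */
static void aes_block_crypt(struct aes_req *req)
{
	if (!req->rctx.ecb)
		memset(req->iv, 0, sizeof(req->iv)); /* placeholder for IV setup */

	/* ... submit the descriptor to the engine (omitted) ... */
}
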
/Linux-v5.4/arch/x86/include/asm/crypto/
glue_helper.h
29 common_glue_func_t ecb; member
/Linux-v5.4/crypto/
Makefile
78 obj-$(CONFIG_CRYPTO_ECB) += ecb.o
/Linux-v5.4/Documentation/crypto/
devel-algos.rst
111 Example of transformations: cbc(aes), ecb(arc4), ...
architecture.rst
40 - ecb(aes)
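
The documentation hits use ecb as a crypto API template name: "ecb(aes)" instantiates the ecb template from crypto/ecb.c (the crypto Makefile hit above) around the aes cipher when a transform is allocated by that string. A hypothetical in-kernel helper sketching that usage; the function name and the trimmed error handling are illustrative, while the skcipher allocation and request calls are the standard crypto API ones.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* encrypt one 16-byte block in place with ECB-mode AES (sketch only) */
static int ecb_aes_encrypt_one_block(const u8 *key, unsigned int keylen, u8 *buf)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* "ecb(aes)" = the ecb template wrapped around the aes cipher */
	tfm = crypto_alloc_skcipher("ecb(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, 16);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	/* ECB is stateless, so no IV is passed (the last argument is NULL) */
	skcipher_request_set_crypt(req, &sg, &sg, 16, NULL);
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
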
/Linux-v5.4/drivers/crypto/mediatek/
mtk-aes.c
438 goto ecb; in mtk_aes_info_init()
459 ecb: in mtk_aes_info_init()
/Linux-v5.4/arch/s390/include/asm/
kvm_host.h
220 __u8 ecb; /* 0x0061 */ member
/Linux-v5.4/Documentation/admin-guide/device-mapper/
dm-integrity.rst
147 "salsa20", "ctr(aes)" or "ecb(arc4)").
