Searched refs:ecb (Results 1 – 25 of 29) sorted by relevance

/Linux-v4.19/drivers/scsi/
aha1740.c
     77  struct ecb ecb[AHA1740_ECBS];  [member]
     88  static inline struct ecb *ecb_dma_to_cpu (struct Scsi_Host *host,  [in ecb_dma_to_cpu()]
     96  return (struct ecb *)(((char *) hdata->ecb) + (unsigned int) offset);  [in ecb_dma_to_cpu()]
    104  offset = (char *) cpu - (char *) hdata->ecb;  [in ecb_cpu_to_dma()]
    213  struct ecb *ecbptr;  [in aha1740_intr_handle()]
    279  memset(ecbptr,0,sizeof(struct ecb));  [in aha1740_intr_handle()]
    353  if (!host->ecb[ecbno].cmdw)  [in aha1740_queuecommand_lck()]
    360  if (host->ecb[ecbno].cmdw)  [in aha1740_queuecommand_lck()]
    363  host->ecb[ecbno].cmdw = AHA1740CMD_INIT; /* SCSI Initiator Command  [in aha1740_queuecommand_lck()]
    373  host->ecb[ecbno].cdblen = SCpnt->cmd_len; /* SCSI Command  [in aha1740_queuecommand_lck()]
    [all …]
aha1740.h
    105  struct ecb { /* Enhanced Control Block 6.1 */  [struct]
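The aha1740 hits centre on one pattern: the driver keeps a single CPU-visible array of ECBs and converts between CPU pointers and bus (DMA) addresses by offset arithmetic against the base of that array. The sketch below restates the pattern in a self-contained form; the hostdata layout and the ecb_dma_addr field name are assumptions for illustration, not the driver's exact definitions.

/*
 * Minimal sketch of the DMA<->CPU address translation suggested by the
 * hits above. The layout and field names here are assumed, not the
 * driver's real ones.
 */
#include <stddef.h>
#include <stdint.h>

typedef uint64_t dma_addr_t;            /* stand-in for the kernel type */

struct ecb { char pad[64]; };           /* Enhanced Control Block, fields elided */

struct hostdata_sketch {
	struct ecb ecb[32];             /* CPU-visible ECB array             */
	dma_addr_t ecb_dma_addr;        /* DMA address of ecb[0] (assumed)   */
};

/* Convert a DMA address reported by the adapter back into a CPU pointer. */
static inline struct ecb *ecb_dma_to_cpu_sketch(struct hostdata_sketch *hdata,
						dma_addr_t dma)
{
	dma_addr_t offset = dma - hdata->ecb_dma_addr;

	return (struct ecb *)(((char *)hdata->ecb) + (size_t)offset);
}

/* Convert a CPU pointer into the DMA address handed to the adapter. */
static inline dma_addr_t ecb_cpu_to_dma_sketch(struct hostdata_sketch *hdata,
					       void *cpu)
{
	dma_addr_t offset = (char *)cpu - (char *)hdata->ecb;

	return hdata->ecb_dma_addr + offset;
}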
/Linux-v4.19/arch/x86/crypto/
camellia_aesni_avx2_glue.c
     48  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
     51  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
     54  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
     57  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
    102  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
    105  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
    108  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
    111  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
twofish_glue_3way.c
    112  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
    115  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
    125  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_ctr_3way) }
    128  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_ctr) }
    138  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
    141  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
serpent_avx2_glue.c
     52  .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_16way) }
     55  .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
     58  .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
    100  .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_16way) }
    103  .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
    106  .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
twofish_avx_glue.c
    111  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_enc_8way) }
    114  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
    117  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
    156  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_dec_8way) }
    159  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
    162  .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
camellia_aesni_avx_glue.c
     70  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
     73  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
     76  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
    115  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
    118  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
    121  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
serpent_sse2_glue.c
     97  .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_enc_blk_xway) }
    100  .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
    123  .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_dec_blk_xway) }
    126  .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
serpent_avx_glue.c
    120  .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
    123  .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
    159  .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
    162  .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
cast6_avx_glue.c
     88  .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
     91  .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
    127  .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
    130  .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
camellia_glue.c
   1333  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
   1336  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
   1359  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
   1362  .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
glue_helper.c
     63  gctx->funcs[i].fn_u.ecb(ctx, dst, src);  [in glue_ecb_req_128bit()]
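All of these .fn_u.ecb initialisers populate the per-cipher tables that the glue helper walks: for each chunk of data it tries the widest available batch routine first (32-way, 16-way, 8-way, 3/2-way, then single block) and calls it through the ecb member, as the glue_ecb_req_128bit() hit shows. Below is a simplified, self-contained sketch of that dispatch idea; the structure and names are trimmed-down assumptions, not the kernel's exact definitions.

/*
 * Simplified sketch of how the .fn_u.ecb tables above are consumed.
 * The entry layout is an assumption; the real table lives in
 * struct common_glue_ctx and is walked by glue_helper.c.
 */
#include <stddef.h>

typedef void (*common_glue_func_t)(void *ctx, unsigned char *dst,
				   const unsigned char *src);

struct glue_func_entry {
	size_t num_blocks;              /* blocks handled per call */
	common_glue_func_t ecb;         /* parallel ECB helper     */
};

/* Walk the table from the widest batch down to the single-block routine. */
static void ecb_crypt_sketch(void *ctx,
			     const struct glue_func_entry *funcs,
			     size_t num_funcs,
			     unsigned char *dst, const unsigned char *src,
			     size_t nbytes, size_t bsize)
{
	for (size_t i = 0; i < num_funcs; i++) {
		size_t func_bytes = bsize * funcs[i].num_blocks;

		while (nbytes >= func_bytes) {
			funcs[i].ecb(ctx, dst, src);  /* e.g. *_ecb_enc_32way */
			src += func_bytes;
			dst += func_bytes;
			nbytes -= func_bytes;
		}
	}
}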
/Linux-v4.19/arch/s390/kvm/
vsie.c
    280  const bool wants_tx = READ_ONCE(scb_o->ecb) & ECB_TE;  [in shadow_scb()]
    281  bool had_tx = scb_s->ecb & ECB_TE;  [in shadow_scb()]
    288  scb_s->ecb = 0;  [in shadow_scb()]
    345  scb_s->ecb |= scb_o->ecb & ECB_HOSTPROTINT;  [in shadow_scb()]
    351  scb_s->ecb |= ECB_TE;  [in shadow_scb()]
    369  scb_s->ecb |= scb_o->ecb & ECB_GS;  [in shadow_scb()]
    456  if (!rc && (scb_s->ecb & ECB_TE))  [in map_prefix()]
    585  if (gpa && (scb_s->ecb & ECB_TE)) {  [in pin_blocks()]
    635  if (((scb_s->ecb & ECB_GS) && !(scb_s->ecd & ECD_HOSTREGMGMT)) ||  [in pin_blocks()]
kvm-s390.h
     23  #define IS_TE_ENABLED(vcpu) ((vcpu->arch.sie_block->ecb & ECB_TE))
kvm-s390.c
   2639  vcpu->arch.sie_block->ecb |= ECB_HOSTPROTINT;  [in kvm_arch_vcpu_setup()]
   2641  vcpu->arch.sie_block->ecb |= ECB_SRSI;  [in kvm_arch_vcpu_setup()]
   2643  vcpu->arch.sie_block->ecb |= ECB_TE;  [in kvm_arch_vcpu_setup()]
   3546  vcpu->arch.sie_block->ecb |= ECB_GS;  [in sync_regs()]
priv.c
     66  vcpu->arch.sie_block->ecb |= ECB_GS;  [in handle_gs()]
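In the s390 KVM hits, ecb is not a cipher mode at all: it is the execution-control byte of the SIE control block (the kvm_host.h hit further down shows it at offset 0x0061), and each ECB_* flag enables one hardware feature for the guest. kvm-s390.c sets the bits a vCPU is allowed to use, and vsie.c copies a filtered subset into the shadow control block for nested guests. A minimal sketch of that bit-gating pattern follows; the flag values and the facility check are illustrative assumptions, not the kernel's definitions.

#include <stdbool.h>
#include <stdint.h>

/* Placeholder values; the real flags live in arch/s390/include/asm/kvm_host.h. */
#define ECB_HOSTPROTINT	0x20
#define ECB_TE		0x10
#define ECB_SRSI	0x04
#define ECB_GS		0x02

struct sie_block_sketch {
	uint8_t ecb;		/* execution-control bits of the SIE block */
};

static bool facility_available(int nr)	/* stand-in for a test_kvm_facility()-style check */
{
	(void)nr;
	return true;
}

/* vCPU setup: enable only the features the host can actually back. */
static void setup_ecb_sketch(struct sie_block_sketch *scb)
{
	scb->ecb = 0;
	if (facility_available(73))	/* transactional execution */
		scb->ecb |= ECB_TE;
	if (facility_available(133))	/* guarded storage */
		scb->ecb |= ECB_GS;
}

/* Nested (vSIE) case: copy only permitted bits from the guest's own SCB. */
static void shadow_ecb_sketch(struct sie_block_sketch *shadow,
			      const struct sie_block_sketch *origin)
{
	shadow->ecb = 0;
	shadow->ecb |= origin->ecb & ECB_HOSTPROTINT;
	if ((origin->ecb & ECB_TE) && facility_available(73))
		shadow->ecb |= ECB_TE;
	shadow->ecb |= origin->ecb & ECB_GS;
}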
/Linux-v4.19/drivers/crypto/nx/
Makefile
      6  nx-aes-ecb.o \
/Linux-v4.19/drivers/crypto/
mxs-dcp.c
     94  unsigned int ecb:1;  [member]
    227  if (rctx->ecb)  [in mxs_dcp_run_aes()]
    280  if (!rctx->ecb) {  [in mxs_dcp_aes_block_crypt()]
    401  static int mxs_dcp_aes_enqueue(struct ablkcipher_request *req, int enc, int ecb)  [argument, in mxs_dcp_aes_enqueue()]
    413  rctx->ecb = ecb;  [in mxs_dcp_aes_enqueue()]
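Here ecb is a one-bit flag in the per-request context: mxs_dcp_aes_enqueue() records whether the request is ECB (and therefore needs no IV handling), and the rest of the driver branches on rctx->ecb. The per-mode entry points differ only in the enc/ecb flags they pass down, roughly as in this sketch; the wrapper names and stub types are assumptions for illustration.

/* Opaque stand-ins for the kernel types used by the driver. */
struct ablkcipher_request { int dummy; };

struct dcp_aes_req_ctx_sketch {
	unsigned int enc:1;
	unsigned int ecb:1;	/* 1: no IV handling, 0: CBC-style chaining */
};

/* Common path: record the flags in the request context, then queue it. */
static int mxs_dcp_aes_enqueue_sketch(struct ablkcipher_request *req,
				      struct dcp_aes_req_ctx_sketch *rctx,
				      int enc, int ecb)
{
	(void)req;
	rctx->enc = enc;
	rctx->ecb = ecb;
	return 0;	/* the real driver returns -EINPROGRESS/-EBUSY */
}

/* Per-mode entry points only differ in the flags they fix. */
static int aes_ecb_encrypt_sketch(struct ablkcipher_request *req,
				  struct dcp_aes_req_ctx_sketch *rctx)
{
	return mxs_dcp_aes_enqueue_sketch(req, rctx, 1 /* encrypt */, 1 /* ECB */);
}

static int aes_cbc_decrypt_sketch(struct ablkcipher_request *req,
				  struct dcp_aes_req_ctx_sketch *rctx)
{
	return mxs_dcp_aes_enqueue_sketch(req, rctx, 0 /* decrypt */, 0 /* CBC */);
}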
/Linux-v4.19/arch/x86/include/asm/crypto/
glue_helper.h
     29  common_glue_func_t ecb;  [member]
/Linux-v4.19/crypto/
Makefile
     77  obj-$(CONFIG_CRYPTO_ECB) += ecb.o
/Linux-v4.19/arch/s390/include/asm/
kvm_host.h
    217  __u8 ecb; /* 0x0061 */  [member]
/Linux-v4.19/Documentation/crypto/
devel-algos.rst
    111  Example of transformations: cbc(aes), ecb(arc4), ...
architecture.rst
     40  - ecb(aes)
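These documentation hits use the crypto API's template notation, where "ecb(aes)" means the ECB template instantiated over the AES cipher, and that string is exactly what a kernel user passes when allocating a transform. The sketch below shows the allocate-setkey-encrypt flow through the skcipher interface, waiting synchronously via crypto_wait_req(); error paths are trimmed and the buffer length is assumed to be a multiple of the AES block size, as ECB requires.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/scatterlist.h>

static int ecb_aes_encrypt_sketch(const u8 *key, unsigned int keylen,
				  u8 *buf, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* "ecb(aes)" = the ECB template wrapped around the AES cipher. */
	tfm = crypto_alloc_skcipher("ecb(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);

	/* In-place encryption; ECB takes no IV, so the IV argument is NULL. */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_crypt(req, &sg, &sg, len, NULL);

	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}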
/Linux-v4.19/drivers/crypto/mediatek/
mtk-aes.c
    436  goto ecb;  [in mtk_aes_info_init()]
    452  ecb:  [in mtk_aes_info_init()]
/Linux-v4.19/Documentation/device-mapper/
dm-integrity.txt
    124  "salsa20", "ctr(aes)" or "ecb(arc4)").
