/*
 * Shared descriptors for aead, ablkcipher algorithms
 *
 * Copyright 2016 NXP
 */

#include "compat.h"
#include "desc_constr.h"
#include "caamalg_desc.h"

/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}

/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			OP_ALG_DECRYPT | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}

/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *	(non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *	A split key is required for SEC Era < 6; the size of the split key
 *	is specified in this case. Valid algorithm values - one of
 *	OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *	with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
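
/*
 * Usage sketch (editor's illustration, not part of the driver): a caller
 * fills a struct alginfo and passes a descriptor buffer it owns. The
 * buffer sizing and the split_key/authsize/era variables below are
 * assumptions for illustration only.
 *
 *	u32 sh_desc[CAAM_DESC_BYTES_MAX / CAAM_CMD_SZ];
 *	struct alginfo adata = {};
 *
 *	adata.algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP;
 *	adata.key_virt = split_key;		// MDHA split key (SEC Era < 6)
 *	adata.keylen = split_key_len;
 *	adata.keylen_pad = split_key_pad_len;
 *	adata.key_inline = true;
 *
 *	cnstr_shdsc_aead_null_encap(sh_desc, &adata, authsize, era);
 */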

/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *	(non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *	A split key is required for SEC Era < 6; the size of the split key
 *	is specified in this case. Valid algorithm values - one of
 *	OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *	with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);

static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce, int era)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}

	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}
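
/*
 * For reference, the command sequence emitted by init_sh_desc_key_aead()
 * is (summarized from the code above):
 *
 *	SHR HDR (SERIAL, SAVECTX)
 *	JUMP  -> skip key loading when the descriptor is already shared
 *	KEY   class 2: MDHA split key (Era < 6) or DKP protocol (Era >= 6)
 *	KEY   class 1: cipher key (nonce stripped for RFC3686)
 *	LOAD + MOVE nonce into CONTEXT1 (RFC3686 only)
 *	<jump target>
 */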

/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *	(non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *	with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *	A split key is required for SEC Era < 6; the size of the split key
 *	is specified in this case. Valid algorithm values - one of
 *	OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *	with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool is_rfc3686,
			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
			    int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
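
/*
 * Usage sketch (editor's illustration): constructing the encap descriptor
 * for something like authenc(hmac(sha1),cbc(aes)) without RFC3686 wrapping
 * and without caam/qi. "cdata"/"adata" contents and the ivsize/authsize/era
 * values are assumed to be set up by the caller as in the null-encap
 * example above.
 *
 *	cnstr_shdsc_aead_encap(sh_desc, &cdata, &adata, ivsize, authsize,
 *			       false, NULL, 0, false, era);
 *
 * With RFC3686 wrapping, is_rfc3686 is true, a 4-byte nonce pointer is
 * passed, and ctx1_iv_off is typically chosen so the IV sits just past the
 * nonce inside CONTEXT1.
 */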

/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *	(non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *	with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *	A split key is required for SEC Era < 6; the size of the split key
 *	is specified in this case. Valid algorithm values - one of
 *	OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *	with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @geniv: whether the IV is HW-generated (encrypted chain IV)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off, const bool is_qi, int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		if (!geniv)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
					CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
					CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);

/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *	(non-protocol) with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *	with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *	A split key is required for SEC Era < 6; the size of the split key
 *	is specified in this case. Valid algorithm values - one of
 *	OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *	with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi, int era)
{
	u32 geniv, moveiv;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	if (is_rfc3686) {
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));

		goto copy_iv;
	}

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV from class 1 context to OFIFO */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy IV from OFIFO to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload IV */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead givenc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);

/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
					ivsize);
	} else {
		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
	}

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump to ICV writing */
	if (is_qi)
		append_jump(desc, JUMP_TEST_ALL | 4);
	else
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
	if (is_qi)
		/* jump to ICV writing */
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
				     FIFOLD_TYPE_LAST1);

	/* write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
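
/*
 * Usage sketch (editor's illustration) for gcm(aes): the GCM IV is 12
 * bytes and the full tag 16 bytes; variable names are assumptions.
 *
 *	struct alginfo cdata = {};
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM;
 *	cdata.key_virt = aes_key;
 *	cdata.keylen = aes_keylen;
 *	cdata.key_inline = true;
 *
 *	cnstr_shdsc_gcm_encap(sh_desc, &cdata, 12, 16, false);
 */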

/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);

/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *	(non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
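
/*
 * Note on the RFC4106 key layout (summarized from the code above): when
 * is_qi is set, the 4-byte salt is read from cdata->key_virt +
 * cdata->keylen, i.e. the caller is expected to keep the salt appended
 * right after the AES key, while cdata->keylen counts only the key bytes
 * loaded into the class 1 key register.
 */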

/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *	(non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);

/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *	(non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);

/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *	(non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);

/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}

/**
 * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *	with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
				  unsigned int ivsize, const bool is_rfc3686,
				  const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load IV */
	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
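
/*
 * Usage sketch (editor's illustration) for cbc(aes): the IV lives at
 * offset 0 of CONTEXT1 and no RFC3686 wrapping is used; variable names
 * are assumptions.
 *
 *	struct alginfo cdata = {};
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
 *	cdata.key_virt = aes_key;
 *	cdata.keylen = aes_keylen;
 *
 *	cnstr_shdsc_ablkcipher_encap(sh_desc, &cdata, 16, false, 0);
 */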

/**
 * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *	with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
				  unsigned int ivsize, const bool is_rfc3686,
				  const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load IV */
	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);

/**
 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
 *	with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *	with OP_ALG_AAI_CBC.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
				     unsigned int ivsize, const bool is_rfc3686,
				     const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd, geniv;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
		(ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);

/**
 * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
 *	descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
{
	__be64 sector_size = cpu_to_be64(512);
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);
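
/*
 * Usage sketch (editor's illustration) for xts(aes): cdata->keylen covers
 * both halves of the XTS key (e.g. 32 bytes for AES-128), the sector size
 * is hard-coded to 512 above, and only the upper 8 bytes of the 16-byte IV
 * are consumed as the sector index; variable names are assumptions.
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS;
 *	cdata.key_virt = xts_key;	// both key halves concatenated
 *	cdata.keylen = xts_keylen;
 *
 *	cnstr_shdsc_xts_ablkcipher_encap(sh_desc, &cdata);
 */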

/**
 * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
 *	descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *	Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
{
	__be64 sector_size = cpu_to_be64(512);
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM descriptor support");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");