// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <asm/unaligned.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/chacha.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/poly1305.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sm3.h>
#include <crypto/sm4.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;
	u8 blocksz;
	u32 ivmask;
	u32 ctrinit;

	__le32 key[16];
	u32 nonce;
	unsigned int key_len, xts;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;

	struct crypto_aead *fback;
};
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated with the request */
	unsigned int rdescs;
	bool needs_inv;
	int nr_src, nr_dst;
};

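/*
 * Fill the command descriptor's embedded token area with the IV material
 * for a plain skcipher request and return the number of 32-bit token
 * words consumed. For RFC3686-style CTR, for example, the four words end
 * up laid out as:
 *
 *	token[0]   32 bit nonce taken from the key
 *	token[1-2] 64 bit IV taken from the request
 *	token[3]   32 bit big endian counter, starting at ctx->ctrinit
 */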
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}

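/*
 * Build the instruction token for a plain skcipher operation. When the IV
 * already occupies all four embedded token words, the single DIRECTION
 * instruction spills over into the additional token area (atoken);
 * otherwise it fits inside the command descriptor itself, padded with a
 * NOP.
 */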
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *token;
	int ivlen;

	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
	if (ivlen == 4) {
		/* No space in cdesc, instruction moves to atoken */
		cdesc->additional_cdata_size = 1;
		token = atoken;
	} else {
		/* Everything fits in cdesc */
		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Need to pad with NOP */
		eip197_noop_token(&token[1]);
	}

	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token->packet_length = length;
	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		      EIP197_TOKEN_STAT_LAST_HASH;
	token->instructions = EIP197_TOKEN_INS_LAST |
			      EIP197_TOKEN_INS_TYPE_CRYPTO |
			      EIP197_TOKEN_INS_TYPE_OUTPUT;
}

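/*
 * Load the AEAD IV words into the embedded token area: nonce + 64 bit IV
 * + counter for CTR and IPsec ESP modes, 96 bit IV + counter for GCM and
 * ChaCha20-Poly1305, or a full cipher block for CBC.
 */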
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}

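/*
 * Build the full token program for an AEAD operation: hash the AAD,
 * optionally skip the ESP IV, run the payload through the cipher and/or
 * hash engine, and finally insert (encrypt) or retrieve and verify
 * (decrypt) the ICV. CCM additionally prepends the B0 block and the
 * encoded AAD length to the CBC-MAC input and pads both AAD and payload
 * out to 16 byte boundaries for the hash engine. The accumulated token
 * size is written back to the command descriptor at the end.
 */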
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}

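/*
 * Expand and store the AES key. If the engine's transform record cache
 * (TRC) may still hold a context record programmed with the old key,
 * flag the context for invalidation so the stale record is not reused.
 */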
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

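/*
 * Set the combined authenc() key: split it into encryption and
 * authentication parts, peel off the RFC3686 nonce for CTR mode,
 * validate the cipher key, and precompute the HMAC ipad/opad digests
 * via the matching safexcel hash algorithm.
 */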
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL, i;
	const char *alg;

	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Must have at least space for the nonce here */
		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_DES:
		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_AES:
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_SM4:
		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) !=
			    ((u32 *)keys.enckey)[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		alg = "safexcel-sha1";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		alg = "safexcel-sha224";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		alg = "safexcel-sha256";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		alg = "safexcel-sha384";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		alg = "safexcel-sha512";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
		alg = "safexcel-sm3";
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
				 alg, ctx->state_sz))
		goto badkey;

	/* Now copy the keys into the context */
	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
	ctx->key_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

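/*
 * Fill in the two context control words that tell the engine what to do:
 * cipher algorithm and key size, hash algorithm, the processing order
 * (encrypt-then-hash, hash-then-decrypt, ...) and the total size of the
 * context record in 32 bit words.
 */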
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			return 0;
		} else {
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}

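/*
 * Completion handler for a regular cipher request: collect and error
 * check all result descriptors, unmap the DMA buffers and, for CBC
 * encryption, copy the last output block back into the request IV.
 */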
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}

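/*
 * Map the source/destination scatterlists and build the command and
 * result descriptor chains for a single request. The first command
 * descriptor carries the context control words and the token program;
 * the result descriptors skip the AAD area, which the engine does not
 * write back.
 */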
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       &ctx->base.ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save the IV from the last crypto input word for CBC modes
		 * in the decrypt direction. This must be done first, as an
		 * in-place operation would otherwise overwrite it.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember the actual input length; the source buffer length may be
	 * updated below in case of an in-place operation.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
			     (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		if (sreq->nr_src > 0)
			dma_map_sg(priv->dev, src, sreq->nr_src,
				   DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}

		if (sreq->nr_src > 0)
			dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			ret = -EINVAL;
			goto unmap;
		}

		if (sreq->nr_dst > 0)
			dma_map_sg(priv->dev, dst, sreq->nr_dst,
				   DMA_FROM_DEVICE);
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (!totlen) {
		/*
		 * The EIP97 cannot deal with zero length input packets!
		 * So stuff a dummy command descriptor indicating a 1 byte
		 * (dummy) input packet, using the context record as source.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring,
						 1, 1, ctx->base.ctxr_dma,
						 1, 1, ctx->base.ctxr_dma,
						 &atoken);
		if (IS_ERR(first_cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(first_cdesc);
			goto cdesc_rollback;
		}
		n_cdesc = 1;
		goto skip_cdesc;
	}

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued < len)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma, &atoken);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;

		n_cdesc++;
		queued -= len;
		if (!queued)
			break;
	}
skip_cdesc:
	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap:
	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	return ret;
}

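/*
 * Completion handler for a context invalidation request. If the tfm is
 * being torn down (exit_inv), free the context record; otherwise requeue
 * the data request that triggered the invalidation on a fresh ring.
 */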
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	sreq->rdescs = *results;
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}

static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct crypto_wait *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ring = ctx->base.ring;
	int err;

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	err = crypto_wait_req(-EINPROGRESS, result);

	if (err) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 err);
		return err;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	DECLARE_CRYPTO_WAIT(result);

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	DECLARE_CRYPTO_WAIT(result);

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

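/*
 * Common entry point for encrypt and decrypt: allocate the per-tfm
 * context record on first use, request a cache invalidation first if the
 * key changed, then enqueue the request on the context's ring and kick
 * the ring worker.
 */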
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->base.priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

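/*
 * RFC3686 CTR setkey: the last four bytes of the key material are the
 * per-tfm nonce, the remainder is the actual AES key.
 */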
static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	keylen = len - CTR_RFC3686_NONCE_SIZE;
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if the context exists and the key changed, it needs invalidating */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
1518
1519 memcpy(ctx->key, key, len);
1520 ctx->key_len = len;
1521
1522 return 0;
1523 }
1524
safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm * tfm)1525 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1526 {
1527 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1528
1529 safexcel_skcipher_cra_init(tfm);
1530 ctx->alg = SAFEXCEL_DES;
1531 ctx->blocksz = DES_BLOCK_SIZE;
1532 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1533 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1534 return 0;
1535 }
1536
1537 struct safexcel_alg_template safexcel_alg_cbc_des = {
1538 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1539 .algo_mask = SAFEXCEL_ALG_DES,
1540 .alg.skcipher = {
1541 .setkey = safexcel_des_setkey,
1542 .encrypt = safexcel_encrypt,
1543 .decrypt = safexcel_decrypt,
1544 .min_keysize = DES_KEY_SIZE,
1545 .max_keysize = DES_KEY_SIZE,
1546 .ivsize = DES_BLOCK_SIZE,
1547 .base = {
1548 .cra_name = "cbc(des)",
1549 .cra_driver_name = "safexcel-cbc-des",
1550 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1551 .cra_flags = CRYPTO_ALG_ASYNC |
1552 CRYPTO_ALG_ALLOCATES_MEMORY |
1553 CRYPTO_ALG_KERN_DRIVER_ONLY,
1554 .cra_blocksize = DES_BLOCK_SIZE,
1555 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1556 .cra_alignmask = 0,
1557 .cra_init = safexcel_skcipher_des_cbc_cra_init,
1558 .cra_exit = safexcel_skcipher_cra_exit,
1559 .cra_module = THIS_MODULE,
1560 },
1561 },
1562 };
1563
safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm * tfm)1564 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1565 {
1566 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1567
1568 safexcel_skcipher_cra_init(tfm);
1569 ctx->alg = SAFEXCEL_DES;
1570 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1571 ctx->blocksz = 0;
1572 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1573 return 0;
1574 }
1575
1576 struct safexcel_alg_template safexcel_alg_ecb_des = {
1577 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1578 .algo_mask = SAFEXCEL_ALG_DES,
1579 .alg.skcipher = {
1580 .setkey = safexcel_des_setkey,
1581 .encrypt = safexcel_encrypt,
1582 .decrypt = safexcel_decrypt,
1583 .min_keysize = DES_KEY_SIZE,
1584 .max_keysize = DES_KEY_SIZE,
1585 .base = {
1586 .cra_name = "ecb(des)",
1587 .cra_driver_name = "safexcel-ecb-des",
1588 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1589 .cra_flags = CRYPTO_ALG_ASYNC |
1590 CRYPTO_ALG_ALLOCATES_MEMORY |
1591 CRYPTO_ALG_KERN_DRIVER_ONLY,
1592 .cra_blocksize = DES_BLOCK_SIZE,
1593 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1594 .cra_alignmask = 0,
1595 .cra_init = safexcel_skcipher_des_ecb_cra_init,
1596 .cra_exit = safexcel_skcipher_cra_exit,
1597 .cra_module = THIS_MODULE,
1598 },
1599 },
1600 };
1601
safexcel_des3_ede_setkey(struct crypto_skcipher * ctfm,const u8 * key,unsigned int len)1602 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1603 const u8 *key, unsigned int len)
1604 {
1605 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1606 struct safexcel_crypto_priv *priv = ctx->base.priv;
1607 int err;
1608
1609 err = verify_skcipher_des3_key(ctfm, key);
1610 if (err)
1611 return err;
1612
	/* if the context exists and the key changed, it needs invalidating */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
1617
1618 memcpy(ctx->key, key, len);
1619 ctx->key_len = len;
1620
1621 return 0;
1622 }
1623
safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm * tfm)1624 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1625 {
1626 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1627
1628 safexcel_skcipher_cra_init(tfm);
1629 ctx->alg = SAFEXCEL_3DES;
1630 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1631 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1632 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1633 return 0;
1634 }
1635
1636 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1637 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1638 .algo_mask = SAFEXCEL_ALG_DES,
1639 .alg.skcipher = {
1640 .setkey = safexcel_des3_ede_setkey,
1641 .encrypt = safexcel_encrypt,
1642 .decrypt = safexcel_decrypt,
1643 .min_keysize = DES3_EDE_KEY_SIZE,
1644 .max_keysize = DES3_EDE_KEY_SIZE,
1645 .ivsize = DES3_EDE_BLOCK_SIZE,
1646 .base = {
1647 .cra_name = "cbc(des3_ede)",
1648 .cra_driver_name = "safexcel-cbc-des3_ede",
1649 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1650 .cra_flags = CRYPTO_ALG_ASYNC |
1651 CRYPTO_ALG_ALLOCATES_MEMORY |
1652 CRYPTO_ALG_KERN_DRIVER_ONLY,
1653 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1654 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1655 .cra_alignmask = 0,
1656 .cra_init = safexcel_skcipher_des3_cbc_cra_init,
1657 .cra_exit = safexcel_skcipher_cra_exit,
1658 .cra_module = THIS_MODULE,
1659 },
1660 },
1661 };
1662
safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm * tfm)1663 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1664 {
1665 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1666
1667 safexcel_skcipher_cra_init(tfm);
1668 ctx->alg = SAFEXCEL_3DES;
1669 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1670 ctx->blocksz = 0;
1671 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1672 return 0;
1673 }
1674
1675 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1676 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1677 .algo_mask = SAFEXCEL_ALG_DES,
1678 .alg.skcipher = {
1679 .setkey = safexcel_des3_ede_setkey,
1680 .encrypt = safexcel_encrypt,
1681 .decrypt = safexcel_decrypt,
1682 .min_keysize = DES3_EDE_KEY_SIZE,
1683 .max_keysize = DES3_EDE_KEY_SIZE,
1684 .base = {
1685 .cra_name = "ecb(des3_ede)",
1686 .cra_driver_name = "safexcel-ecb-des3_ede",
1687 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1688 .cra_flags = CRYPTO_ALG_ASYNC |
1689 CRYPTO_ALG_ALLOCATES_MEMORY |
1690 CRYPTO_ALG_KERN_DRIVER_ONLY,
1691 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1692 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1693 .cra_alignmask = 0,
1694 .cra_init = safexcel_skcipher_des3_ecb_cra_init,
1695 .cra_exit = safexcel_skcipher_cra_exit,
1696 .cra_module = THIS_MODULE,
1697 },
1698 },
1699 };
1700
safexcel_aead_encrypt(struct aead_request * req)1701 static int safexcel_aead_encrypt(struct aead_request *req)
1702 {
1703 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1704
1705 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1706 }
1707
safexcel_aead_decrypt(struct aead_request * req)1708 static int safexcel_aead_decrypt(struct aead_request *req)
1709 {
1710 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1711
1712 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1713 }
1714
safexcel_aead_cra_init(struct crypto_tfm * tfm)1715 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1716 {
1717 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1718 struct safexcel_alg_template *tmpl =
1719 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1720 alg.aead.base);
1721
1722 crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1723 sizeof(struct safexcel_cipher_req));
1724
1725 ctx->base.priv = tmpl->priv;
1726
1727 ctx->alg = SAFEXCEL_AES; /* default */
1728 ctx->blocksz = AES_BLOCK_SIZE;
1729 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1730 ctx->ctrinit = 1;
1731 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1732 ctx->aead = true;
1733 ctx->base.send = safexcel_aead_send;
1734 ctx->base.handle_result = safexcel_aead_handle_result;
1735 return 0;
1736 }
1737
safexcel_aead_sha1_cra_init(struct crypto_tfm * tfm)1738 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1739 {
1740 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1741
1742 safexcel_aead_cra_init(tfm);
1743 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1744 ctx->state_sz = SHA1_DIGEST_SIZE;
1745 return 0;
1746 }
1747
1748 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1749 .type = SAFEXCEL_ALG_TYPE_AEAD,
1750 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1751 .alg.aead = {
1752 .setkey = safexcel_aead_setkey,
1753 .encrypt = safexcel_aead_encrypt,
1754 .decrypt = safexcel_aead_decrypt,
1755 .ivsize = AES_BLOCK_SIZE,
1756 .maxauthsize = SHA1_DIGEST_SIZE,
1757 .base = {
1758 .cra_name = "authenc(hmac(sha1),cbc(aes))",
1759 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1760 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1761 .cra_flags = CRYPTO_ALG_ASYNC |
1762 CRYPTO_ALG_ALLOCATES_MEMORY |
1763 CRYPTO_ALG_KERN_DRIVER_ONLY,
1764 .cra_blocksize = AES_BLOCK_SIZE,
1765 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1766 .cra_alignmask = 0,
1767 .cra_init = safexcel_aead_sha1_cra_init,
1768 .cra_exit = safexcel_aead_cra_exit,
1769 .cra_module = THIS_MODULE,
1770 },
1771 },
1772 };
1773
1774 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1775 {
1776 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1777
1778 safexcel_aead_cra_init(tfm);
1779 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1780 ctx->state_sz = SHA256_DIGEST_SIZE;
1781 return 0;
1782 }
1783
1784 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1785 .type = SAFEXCEL_ALG_TYPE_AEAD,
1786 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1787 .alg.aead = {
1788 .setkey = safexcel_aead_setkey,
1789 .encrypt = safexcel_aead_encrypt,
1790 .decrypt = safexcel_aead_decrypt,
1791 .ivsize = AES_BLOCK_SIZE,
1792 .maxauthsize = SHA256_DIGEST_SIZE,
1793 .base = {
1794 .cra_name = "authenc(hmac(sha256),cbc(aes))",
1795 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1796 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1797 .cra_flags = CRYPTO_ALG_ASYNC |
1798 CRYPTO_ALG_ALLOCATES_MEMORY |
1799 CRYPTO_ALG_KERN_DRIVER_ONLY,
1800 .cra_blocksize = AES_BLOCK_SIZE,
1801 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1802 .cra_alignmask = 0,
1803 .cra_init = safexcel_aead_sha256_cra_init,
1804 .cra_exit = safexcel_aead_cra_exit,
1805 .cra_module = THIS_MODULE,
1806 },
1807 },
1808 };
1809
1810 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1811 {
1812 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1813
1814 safexcel_aead_cra_init(tfm);
1815 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1816 ctx->state_sz = SHA256_DIGEST_SIZE;
1817 return 0;
1818 }
1819
1820 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1821 .type = SAFEXCEL_ALG_TYPE_AEAD,
1822 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1823 .alg.aead = {
1824 .setkey = safexcel_aead_setkey,
1825 .encrypt = safexcel_aead_encrypt,
1826 .decrypt = safexcel_aead_decrypt,
1827 .ivsize = AES_BLOCK_SIZE,
1828 .maxauthsize = SHA224_DIGEST_SIZE,
1829 .base = {
1830 .cra_name = "authenc(hmac(sha224),cbc(aes))",
1831 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1832 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1833 .cra_flags = CRYPTO_ALG_ASYNC |
1834 CRYPTO_ALG_ALLOCATES_MEMORY |
1835 CRYPTO_ALG_KERN_DRIVER_ONLY,
1836 .cra_blocksize = AES_BLOCK_SIZE,
1837 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1838 .cra_alignmask = 0,
1839 .cra_init = safexcel_aead_sha224_cra_init,
1840 .cra_exit = safexcel_aead_cra_exit,
1841 .cra_module = THIS_MODULE,
1842 },
1843 },
1844 };
1845
1846 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1847 {
1848 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1849
1850 safexcel_aead_cra_init(tfm);
1851 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1852 ctx->state_sz = SHA512_DIGEST_SIZE;
1853 return 0;
1854 }
1855
1856 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1857 .type = SAFEXCEL_ALG_TYPE_AEAD,
1858 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1859 .alg.aead = {
1860 .setkey = safexcel_aead_setkey,
1861 .encrypt = safexcel_aead_encrypt,
1862 .decrypt = safexcel_aead_decrypt,
1863 .ivsize = AES_BLOCK_SIZE,
1864 .maxauthsize = SHA512_DIGEST_SIZE,
1865 .base = {
1866 .cra_name = "authenc(hmac(sha512),cbc(aes))",
1867 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1868 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1869 .cra_flags = CRYPTO_ALG_ASYNC |
1870 CRYPTO_ALG_ALLOCATES_MEMORY |
1871 CRYPTO_ALG_KERN_DRIVER_ONLY,
1872 .cra_blocksize = AES_BLOCK_SIZE,
1873 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1874 .cra_alignmask = 0,
1875 .cra_init = safexcel_aead_sha512_cra_init,
1876 .cra_exit = safexcel_aead_cra_exit,
1877 .cra_module = THIS_MODULE,
1878 },
1879 },
1880 };
1881
1882 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1883 {
1884 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1885
1886 safexcel_aead_cra_init(tfm);
1887 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1888 ctx->state_sz = SHA512_DIGEST_SIZE;
1889 return 0;
1890 }
1891
1892 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1893 .type = SAFEXCEL_ALG_TYPE_AEAD,
1894 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1895 .alg.aead = {
1896 .setkey = safexcel_aead_setkey,
1897 .encrypt = safexcel_aead_encrypt,
1898 .decrypt = safexcel_aead_decrypt,
1899 .ivsize = AES_BLOCK_SIZE,
1900 .maxauthsize = SHA384_DIGEST_SIZE,
1901 .base = {
1902 .cra_name = "authenc(hmac(sha384),cbc(aes))",
1903 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1904 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1905 .cra_flags = CRYPTO_ALG_ASYNC |
1906 CRYPTO_ALG_ALLOCATES_MEMORY |
1907 CRYPTO_ALG_KERN_DRIVER_ONLY,
1908 .cra_blocksize = AES_BLOCK_SIZE,
1909 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1910 .cra_alignmask = 0,
1911 .cra_init = safexcel_aead_sha384_cra_init,
1912 .cra_exit = safexcel_aead_cra_exit,
1913 .cra_module = THIS_MODULE,
1914 },
1915 },
1916 };
1917
1918 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1919 {
1920 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1921
1922 safexcel_aead_sha1_cra_init(tfm);
1923 ctx->alg = SAFEXCEL_3DES; /* override default */
1924 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1925 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1926 return 0;
1927 }
1928
1929 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1930 .type = SAFEXCEL_ALG_TYPE_AEAD,
1931 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1932 .alg.aead = {
1933 .setkey = safexcel_aead_setkey,
1934 .encrypt = safexcel_aead_encrypt,
1935 .decrypt = safexcel_aead_decrypt,
1936 .ivsize = DES3_EDE_BLOCK_SIZE,
1937 .maxauthsize = SHA1_DIGEST_SIZE,
1938 .base = {
1939 .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1940 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1941 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1942 .cra_flags = CRYPTO_ALG_ASYNC |
1943 CRYPTO_ALG_ALLOCATES_MEMORY |
1944 CRYPTO_ALG_KERN_DRIVER_ONLY,
1945 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1946 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1947 .cra_alignmask = 0,
1948 .cra_init = safexcel_aead_sha1_des3_cra_init,
1949 .cra_exit = safexcel_aead_cra_exit,
1950 .cra_module = THIS_MODULE,
1951 },
1952 },
1953 };
1954
1955 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1956 {
1957 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1958
1959 safexcel_aead_sha256_cra_init(tfm);
1960 ctx->alg = SAFEXCEL_3DES; /* override default */
1961 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1962 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1963 return 0;
1964 }
1965
1966 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1967 .type = SAFEXCEL_ALG_TYPE_AEAD,
1968 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1969 .alg.aead = {
1970 .setkey = safexcel_aead_setkey,
1971 .encrypt = safexcel_aead_encrypt,
1972 .decrypt = safexcel_aead_decrypt,
1973 .ivsize = DES3_EDE_BLOCK_SIZE,
1974 .maxauthsize = SHA256_DIGEST_SIZE,
1975 .base = {
1976 .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1977 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1978 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1979 .cra_flags = CRYPTO_ALG_ASYNC |
1980 CRYPTO_ALG_ALLOCATES_MEMORY |
1981 CRYPTO_ALG_KERN_DRIVER_ONLY,
1982 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1983 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1984 .cra_alignmask = 0,
1985 .cra_init = safexcel_aead_sha256_des3_cra_init,
1986 .cra_exit = safexcel_aead_cra_exit,
1987 .cra_module = THIS_MODULE,
1988 },
1989 },
1990 };
1991
1992 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1993 {
1994 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1995
1996 safexcel_aead_sha224_cra_init(tfm);
1997 ctx->alg = SAFEXCEL_3DES; /* override default */
1998 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1999 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2000 return 0;
2001 }
2002
2003 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
2004 .type = SAFEXCEL_ALG_TYPE_AEAD,
2005 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2006 .alg.aead = {
2007 .setkey = safexcel_aead_setkey,
2008 .encrypt = safexcel_aead_encrypt,
2009 .decrypt = safexcel_aead_decrypt,
2010 .ivsize = DES3_EDE_BLOCK_SIZE,
2011 .maxauthsize = SHA224_DIGEST_SIZE,
2012 .base = {
2013 .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
2014 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
2015 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2016 .cra_flags = CRYPTO_ALG_ASYNC |
2017 CRYPTO_ALG_ALLOCATES_MEMORY |
2018 CRYPTO_ALG_KERN_DRIVER_ONLY,
2019 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2020 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2021 .cra_alignmask = 0,
2022 .cra_init = safexcel_aead_sha224_des3_cra_init,
2023 .cra_exit = safexcel_aead_cra_exit,
2024 .cra_module = THIS_MODULE,
2025 },
2026 },
2027 };
2028
2029 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2030 {
2031 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2032
2033 safexcel_aead_sha512_cra_init(tfm);
2034 ctx->alg = SAFEXCEL_3DES; /* override default */
2035 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2036 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2037 return 0;
2038 }
2039
2040 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
2041 .type = SAFEXCEL_ALG_TYPE_AEAD,
2042 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2043 .alg.aead = {
2044 .setkey = safexcel_aead_setkey,
2045 .encrypt = safexcel_aead_encrypt,
2046 .decrypt = safexcel_aead_decrypt,
2047 .ivsize = DES3_EDE_BLOCK_SIZE,
2048 .maxauthsize = SHA512_DIGEST_SIZE,
2049 .base = {
2050 .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
2051 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2052 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2053 .cra_flags = CRYPTO_ALG_ASYNC |
2054 CRYPTO_ALG_ALLOCATES_MEMORY |
2055 CRYPTO_ALG_KERN_DRIVER_ONLY,
2056 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2057 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2058 .cra_alignmask = 0,
2059 .cra_init = safexcel_aead_sha512_des3_cra_init,
2060 .cra_exit = safexcel_aead_cra_exit,
2061 .cra_module = THIS_MODULE,
2062 },
2063 },
2064 };
2065
2066 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2067 {
2068 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2069
2070 safexcel_aead_sha384_cra_init(tfm);
2071 ctx->alg = SAFEXCEL_3DES; /* override default */
2072 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2073 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2074 return 0;
2075 }
2076
2077 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2078 .type = SAFEXCEL_ALG_TYPE_AEAD,
2079 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2080 .alg.aead = {
2081 .setkey = safexcel_aead_setkey,
2082 .encrypt = safexcel_aead_encrypt,
2083 .decrypt = safexcel_aead_decrypt,
2084 .ivsize = DES3_EDE_BLOCK_SIZE,
2085 .maxauthsize = SHA384_DIGEST_SIZE,
2086 .base = {
2087 .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2088 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2089 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2090 .cra_flags = CRYPTO_ALG_ASYNC |
2091 CRYPTO_ALG_ALLOCATES_MEMORY |
2092 CRYPTO_ALG_KERN_DRIVER_ONLY,
2093 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2094 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2095 .cra_alignmask = 0,
2096 .cra_init = safexcel_aead_sha384_des3_cra_init,
2097 .cra_exit = safexcel_aead_cra_exit,
2098 .cra_module = THIS_MODULE,
2099 },
2100 },
2101 };
2102
2103 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2104 {
2105 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2106
2107 safexcel_aead_sha1_cra_init(tfm);
2108 ctx->alg = SAFEXCEL_DES; /* override default */
2109 ctx->blocksz = DES_BLOCK_SIZE;
2110 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2111 return 0;
2112 }
2113
2114 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2115 .type = SAFEXCEL_ALG_TYPE_AEAD,
2116 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2117 .alg.aead = {
2118 .setkey = safexcel_aead_setkey,
2119 .encrypt = safexcel_aead_encrypt,
2120 .decrypt = safexcel_aead_decrypt,
2121 .ivsize = DES_BLOCK_SIZE,
2122 .maxauthsize = SHA1_DIGEST_SIZE,
2123 .base = {
2124 .cra_name = "authenc(hmac(sha1),cbc(des))",
2125 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2126 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2127 .cra_flags = CRYPTO_ALG_ASYNC |
2128 CRYPTO_ALG_ALLOCATES_MEMORY |
2129 CRYPTO_ALG_KERN_DRIVER_ONLY,
2130 .cra_blocksize = DES_BLOCK_SIZE,
2131 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2132 .cra_alignmask = 0,
2133 .cra_init = safexcel_aead_sha1_des_cra_init,
2134 .cra_exit = safexcel_aead_cra_exit,
2135 .cra_module = THIS_MODULE,
2136 },
2137 },
2138 };
2139
2140 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2141 {
2142 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2143
2144 safexcel_aead_sha256_cra_init(tfm);
2145 ctx->alg = SAFEXCEL_DES; /* override default */
2146 ctx->blocksz = DES_BLOCK_SIZE;
2147 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2148 return 0;
2149 }
2150
2151 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2152 .type = SAFEXCEL_ALG_TYPE_AEAD,
2153 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2154 .alg.aead = {
2155 .setkey = safexcel_aead_setkey,
2156 .encrypt = safexcel_aead_encrypt,
2157 .decrypt = safexcel_aead_decrypt,
2158 .ivsize = DES_BLOCK_SIZE,
2159 .maxauthsize = SHA256_DIGEST_SIZE,
2160 .base = {
2161 .cra_name = "authenc(hmac(sha256),cbc(des))",
2162 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2163 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2164 .cra_flags = CRYPTO_ALG_ASYNC |
2165 CRYPTO_ALG_ALLOCATES_MEMORY |
2166 CRYPTO_ALG_KERN_DRIVER_ONLY,
2167 .cra_blocksize = DES_BLOCK_SIZE,
2168 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2169 .cra_alignmask = 0,
2170 .cra_init = safexcel_aead_sha256_des_cra_init,
2171 .cra_exit = safexcel_aead_cra_exit,
2172 .cra_module = THIS_MODULE,
2173 },
2174 },
2175 };
2176
2177 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2178 {
2179 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2180
2181 safexcel_aead_sha224_cra_init(tfm);
2182 ctx->alg = SAFEXCEL_DES; /* override default */
2183 ctx->blocksz = DES_BLOCK_SIZE;
2184 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2185 return 0;
2186 }
2187
2188 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2189 .type = SAFEXCEL_ALG_TYPE_AEAD,
2190 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2191 .alg.aead = {
2192 .setkey = safexcel_aead_setkey,
2193 .encrypt = safexcel_aead_encrypt,
2194 .decrypt = safexcel_aead_decrypt,
2195 .ivsize = DES_BLOCK_SIZE,
2196 .maxauthsize = SHA224_DIGEST_SIZE,
2197 .base = {
2198 .cra_name = "authenc(hmac(sha224),cbc(des))",
2199 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2200 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2201 .cra_flags = CRYPTO_ALG_ASYNC |
2202 CRYPTO_ALG_ALLOCATES_MEMORY |
2203 CRYPTO_ALG_KERN_DRIVER_ONLY,
2204 .cra_blocksize = DES_BLOCK_SIZE,
2205 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2206 .cra_alignmask = 0,
2207 .cra_init = safexcel_aead_sha224_des_cra_init,
2208 .cra_exit = safexcel_aead_cra_exit,
2209 .cra_module = THIS_MODULE,
2210 },
2211 },
2212 };
2213
2214 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2215 {
2216 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2217
2218 safexcel_aead_sha512_cra_init(tfm);
2219 ctx->alg = SAFEXCEL_DES; /* override default */
2220 ctx->blocksz = DES_BLOCK_SIZE;
2221 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2222 return 0;
2223 }
2224
2225 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2226 .type = SAFEXCEL_ALG_TYPE_AEAD,
2227 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2228 .alg.aead = {
2229 .setkey = safexcel_aead_setkey,
2230 .encrypt = safexcel_aead_encrypt,
2231 .decrypt = safexcel_aead_decrypt,
2232 .ivsize = DES_BLOCK_SIZE,
2233 .maxauthsize = SHA512_DIGEST_SIZE,
2234 .base = {
2235 .cra_name = "authenc(hmac(sha512),cbc(des))",
2236 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2237 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2238 .cra_flags = CRYPTO_ALG_ASYNC |
2239 CRYPTO_ALG_ALLOCATES_MEMORY |
2240 CRYPTO_ALG_KERN_DRIVER_ONLY,
2241 .cra_blocksize = DES_BLOCK_SIZE,
2242 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2243 .cra_alignmask = 0,
2244 .cra_init = safexcel_aead_sha512_des_cra_init,
2245 .cra_exit = safexcel_aead_cra_exit,
2246 .cra_module = THIS_MODULE,
2247 },
2248 },
2249 };
2250
2251 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2252 {
2253 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2254
2255 safexcel_aead_sha384_cra_init(tfm);
2256 ctx->alg = SAFEXCEL_DES; /* override default */
2257 ctx->blocksz = DES_BLOCK_SIZE;
2258 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2259 return 0;
2260 }
2261
2262 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2263 .type = SAFEXCEL_ALG_TYPE_AEAD,
2264 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2265 .alg.aead = {
2266 .setkey = safexcel_aead_setkey,
2267 .encrypt = safexcel_aead_encrypt,
2268 .decrypt = safexcel_aead_decrypt,
2269 .ivsize = DES_BLOCK_SIZE,
2270 .maxauthsize = SHA384_DIGEST_SIZE,
2271 .base = {
2272 .cra_name = "authenc(hmac(sha384),cbc(des))",
2273 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2274 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2275 .cra_flags = CRYPTO_ALG_ASYNC |
2276 CRYPTO_ALG_ALLOCATES_MEMORY |
2277 CRYPTO_ALG_KERN_DRIVER_ONLY,
2278 .cra_blocksize = DES_BLOCK_SIZE,
2279 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2280 .cra_alignmask = 0,
2281 .cra_init = safexcel_aead_sha384_des_cra_init,
2282 .cra_exit = safexcel_aead_cra_exit,
2283 .cra_module = THIS_MODULE,
2284 },
2285 },
2286 };
2287
2288 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2289 {
2290 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2291
2292 safexcel_aead_sha1_cra_init(tfm);
2293 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2294 return 0;
2295 }
2296
2297 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2298 .type = SAFEXCEL_ALG_TYPE_AEAD,
2299 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2300 .alg.aead = {
2301 .setkey = safexcel_aead_setkey,
2302 .encrypt = safexcel_aead_encrypt,
2303 .decrypt = safexcel_aead_decrypt,
2304 .ivsize = CTR_RFC3686_IV_SIZE,
2305 .maxauthsize = SHA1_DIGEST_SIZE,
2306 .base = {
2307 .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2308 .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2309 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2310 .cra_flags = CRYPTO_ALG_ASYNC |
2311 CRYPTO_ALG_ALLOCATES_MEMORY |
2312 CRYPTO_ALG_KERN_DRIVER_ONLY,
2313 .cra_blocksize = 1,
2314 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2315 .cra_alignmask = 0,
2316 .cra_init = safexcel_aead_sha1_ctr_cra_init,
2317 .cra_exit = safexcel_aead_cra_exit,
2318 .cra_module = THIS_MODULE,
2319 },
2320 },
2321 };
2322
2323 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2324 {
2325 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2326
2327 safexcel_aead_sha256_cra_init(tfm);
2328 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2329 return 0;
2330 }
2331
2332 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2333 .type = SAFEXCEL_ALG_TYPE_AEAD,
2334 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2335 .alg.aead = {
2336 .setkey = safexcel_aead_setkey,
2337 .encrypt = safexcel_aead_encrypt,
2338 .decrypt = safexcel_aead_decrypt,
2339 .ivsize = CTR_RFC3686_IV_SIZE,
2340 .maxauthsize = SHA256_DIGEST_SIZE,
2341 .base = {
2342 .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2343 .cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2344 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2345 .cra_flags = CRYPTO_ALG_ASYNC |
2346 CRYPTO_ALG_ALLOCATES_MEMORY |
2347 CRYPTO_ALG_KERN_DRIVER_ONLY,
2348 .cra_blocksize = 1,
2349 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2350 .cra_alignmask = 0,
2351 .cra_init = safexcel_aead_sha256_ctr_cra_init,
2352 .cra_exit = safexcel_aead_cra_exit,
2353 .cra_module = THIS_MODULE,
2354 },
2355 },
2356 };
2357
2358 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2359 {
2360 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2361
2362 safexcel_aead_sha224_cra_init(tfm);
2363 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2364 return 0;
2365 }
2366
2367 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2368 .type = SAFEXCEL_ALG_TYPE_AEAD,
2369 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2370 .alg.aead = {
2371 .setkey = safexcel_aead_setkey,
2372 .encrypt = safexcel_aead_encrypt,
2373 .decrypt = safexcel_aead_decrypt,
2374 .ivsize = CTR_RFC3686_IV_SIZE,
2375 .maxauthsize = SHA224_DIGEST_SIZE,
2376 .base = {
2377 .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2378 .cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2379 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2380 .cra_flags = CRYPTO_ALG_ASYNC |
2381 CRYPTO_ALG_ALLOCATES_MEMORY |
2382 CRYPTO_ALG_KERN_DRIVER_ONLY,
2383 .cra_blocksize = 1,
2384 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2385 .cra_alignmask = 0,
2386 .cra_init = safexcel_aead_sha224_ctr_cra_init,
2387 .cra_exit = safexcel_aead_cra_exit,
2388 .cra_module = THIS_MODULE,
2389 },
2390 },
2391 };
2392
2393 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2394 {
2395 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2396
2397 safexcel_aead_sha512_cra_init(tfm);
2398 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2399 return 0;
2400 }
2401
2402 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2403 .type = SAFEXCEL_ALG_TYPE_AEAD,
2404 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2405 .alg.aead = {
2406 .setkey = safexcel_aead_setkey,
2407 .encrypt = safexcel_aead_encrypt,
2408 .decrypt = safexcel_aead_decrypt,
2409 .ivsize = CTR_RFC3686_IV_SIZE,
2410 .maxauthsize = SHA512_DIGEST_SIZE,
2411 .base = {
2412 .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2413 .cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2414 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2415 .cra_flags = CRYPTO_ALG_ASYNC |
2416 CRYPTO_ALG_ALLOCATES_MEMORY |
2417 CRYPTO_ALG_KERN_DRIVER_ONLY,
2418 .cra_blocksize = 1,
2419 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2420 .cra_alignmask = 0,
2421 .cra_init = safexcel_aead_sha512_ctr_cra_init,
2422 .cra_exit = safexcel_aead_cra_exit,
2423 .cra_module = THIS_MODULE,
2424 },
2425 },
2426 };
2427
2428 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2429 {
2430 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2431
2432 safexcel_aead_sha384_cra_init(tfm);
2433 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2434 return 0;
2435 }
2436
2437 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2438 .type = SAFEXCEL_ALG_TYPE_AEAD,
2439 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2440 .alg.aead = {
2441 .setkey = safexcel_aead_setkey,
2442 .encrypt = safexcel_aead_encrypt,
2443 .decrypt = safexcel_aead_decrypt,
2444 .ivsize = CTR_RFC3686_IV_SIZE,
2445 .maxauthsize = SHA384_DIGEST_SIZE,
2446 .base = {
2447 .cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2448 .cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2449 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2450 .cra_flags = CRYPTO_ALG_ASYNC |
2451 CRYPTO_ALG_ALLOCATES_MEMORY |
2452 CRYPTO_ALG_KERN_DRIVER_ONLY,
2453 .cra_blocksize = 1,
2454 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2455 .cra_alignmask = 0,
2456 .cra_init = safexcel_aead_sha384_ctr_cra_init,
2457 .cra_exit = safexcel_aead_cra_exit,
2458 .cra_module = THIS_MODULE,
2459 },
2460 },
2461 };
2462
2463 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2464 const u8 *key, unsigned int len)
2465 {
2466 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2467 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2468 struct safexcel_crypto_priv *priv = ctx->base.priv;
2469 struct crypto_aes_ctx aes;
2470 int ret, i;
2471 unsigned int keylen;
2472
2473 /* Check for illegal XTS keys */
2474 ret = xts_verify_key(ctfm, key, len);
2475 if (ret)
2476 return ret;
2477
2478 /* Only half of the key data is cipher key */
2479 keylen = (len >> 1);
2480 ret = aes_expandkey(&aes, key, keylen);
2481 if (ret)
2482 return ret;
2483
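	/*
	 * If the engine caches transform records (TRC) and this context
	 * already has a record in DMA, a changed key must invalidate the
	 * cached record before it can be reused.
	 */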
2484 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2485 for (i = 0; i < keylen / sizeof(u32); i++) {
2486 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2487 ctx->base.needs_inv = true;
2488 break;
2489 }
2490 }
2491 }
2492
2493 for (i = 0; i < keylen / sizeof(u32); i++)
2494 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2495
2496 /* The other half is the tweak key */
2497 ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2498 if (ret)
2499 return ret;
2500
2501 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2502 for (i = 0; i < keylen / sizeof(u32); i++) {
2503 if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2504 aes.key_enc[i]) {
2505 ctx->base.needs_inv = true;
2506 break;
2507 }
2508 }
2509 }
2510
2511 for (i = 0; i < keylen / sizeof(u32); i++)
2512 ctx->key[i + keylen / sizeof(u32)] =
2513 cpu_to_le32(aes.key_enc[i]);
2514
2515 ctx->key_len = keylen << 1;
2516
2517 memzero_explicit(&aes, sizeof(aes));
2518 return 0;
2519 }
2520
2521 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2522 {
2523 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2524
2525 safexcel_skcipher_cra_init(tfm);
2526 ctx->alg = SAFEXCEL_AES;
2527 ctx->blocksz = AES_BLOCK_SIZE;
2528 ctx->xts = 1;
2529 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2530 return 0;
2531 }
2532
2533 static int safexcel_encrypt_xts(struct skcipher_request *req)
2534 {
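	/* XTS is only defined for input of at least one full block */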
2535 if (req->cryptlen < XTS_BLOCK_SIZE)
2536 return -EINVAL;
2537 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2538 SAFEXCEL_ENCRYPT);
2539 }
2540
2541 static int safexcel_decrypt_xts(struct skcipher_request *req)
2542 {
2543 if (req->cryptlen < XTS_BLOCK_SIZE)
2544 return -EINVAL;
2545 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2546 SAFEXCEL_DECRYPT);
2547 }
2548
2549 struct safexcel_alg_template safexcel_alg_xts_aes = {
2550 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2551 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2552 .alg.skcipher = {
2553 .setkey = safexcel_skcipher_aesxts_setkey,
2554 .encrypt = safexcel_encrypt_xts,
2555 .decrypt = safexcel_decrypt_xts,
2556 /* XTS actually uses 2 AES keys glued together */
2557 .min_keysize = AES_MIN_KEY_SIZE * 2,
2558 .max_keysize = AES_MAX_KEY_SIZE * 2,
2559 .ivsize = XTS_BLOCK_SIZE,
2560 .base = {
2561 .cra_name = "xts(aes)",
2562 .cra_driver_name = "safexcel-xts-aes",
2563 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2564 .cra_flags = CRYPTO_ALG_ASYNC |
2565 CRYPTO_ALG_ALLOCATES_MEMORY |
2566 CRYPTO_ALG_KERN_DRIVER_ONLY,
2567 .cra_blocksize = XTS_BLOCK_SIZE,
2568 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2569 .cra_alignmask = 0,
2570 .cra_init = safexcel_skcipher_aes_xts_cra_init,
2571 .cra_exit = safexcel_skcipher_cra_exit,
2572 .cra_module = THIS_MODULE,
2573 },
2574 },
2575 };
2576
2577 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2578 unsigned int len)
2579 {
2580 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2581 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2582 struct safexcel_crypto_priv *priv = ctx->base.priv;
2583 struct crypto_aes_ctx aes;
2584 u32 hashkey[AES_BLOCK_SIZE >> 2];
2585 int ret, i;
2586
2587 ret = aes_expandkey(&aes, key, len);
2588 if (ret) {
2589 memzero_explicit(&aes, sizeof(aes));
2590 return ret;
2591 }
2592
2593 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2594 for (i = 0; i < len / sizeof(u32); i++) {
2595 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2596 ctx->base.needs_inv = true;
2597 break;
2598 }
2599 }
2600 }
2601
2602 for (i = 0; i < len / sizeof(u32); i++)
2603 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2604
2605 ctx->key_len = len;
2606
2607 /* Compute hash key by encrypting zeroes with cipher key */
2608 memset(hashkey, 0, AES_BLOCK_SIZE);
2609 aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);
2610
2611 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2612 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2613 if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2614 ctx->base.needs_inv = true;
2615 break;
2616 }
2617 }
2618 }
2619
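	/* Save the (big endian) GHASH hash key H into the context ipad area */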
2620 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2621 ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2622
2623 memzero_explicit(hashkey, AES_BLOCK_SIZE);
2624 memzero_explicit(&aes, sizeof(aes));
2625 return 0;
2626 }
2627
2628 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2629 {
2630 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2631
2632 safexcel_aead_cra_init(tfm);
2633 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2634 ctx->state_sz = GHASH_BLOCK_SIZE;
2635 ctx->xcm = EIP197_XCM_MODE_GCM;
2636 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2637
2638 return 0;
2639 }
2640
2641 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2642 {
2643 safexcel_aead_cra_exit(tfm);
2644 }
2645
2646 static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2647 unsigned int authsize)
2648 {
2649 return crypto_gcm_check_authsize(authsize);
2650 }
2651
2652 struct safexcel_alg_template safexcel_alg_gcm = {
2653 .type = SAFEXCEL_ALG_TYPE_AEAD,
2654 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2655 .alg.aead = {
2656 .setkey = safexcel_aead_gcm_setkey,
2657 .setauthsize = safexcel_aead_gcm_setauthsize,
2658 .encrypt = safexcel_aead_encrypt,
2659 .decrypt = safexcel_aead_decrypt,
2660 .ivsize = GCM_AES_IV_SIZE,
2661 .maxauthsize = GHASH_DIGEST_SIZE,
2662 .base = {
2663 .cra_name = "gcm(aes)",
2664 .cra_driver_name = "safexcel-gcm-aes",
2665 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2666 .cra_flags = CRYPTO_ALG_ASYNC |
2667 CRYPTO_ALG_ALLOCATES_MEMORY |
2668 CRYPTO_ALG_KERN_DRIVER_ONLY,
2669 .cra_blocksize = 1,
2670 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2671 .cra_alignmask = 0,
2672 .cra_init = safexcel_aead_gcm_cra_init,
2673 .cra_exit = safexcel_aead_gcm_cra_exit,
2674 .cra_module = THIS_MODULE,
2675 },
2676 },
2677 };
2678
2679 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2680 unsigned int len)
2681 {
2682 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2683 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2684 struct safexcel_crypto_priv *priv = ctx->base.priv;
2685 struct crypto_aes_ctx aes;
2686 int ret, i;
2687
2688 ret = aes_expandkey(&aes, key, len);
2689 if (ret) {
2690 memzero_explicit(&aes, sizeof(aes));
2691 return ret;
2692 }
2693
2694 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2695 for (i = 0; i < len / sizeof(u32); i++) {
2696 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2697 ctx->base.needs_inv = true;
2698 break;
2699 }
2700 }
2701 }
2702
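	/*
	 * The key is stored twice: once as the cipher key and once, in big
	 * endian, as the CBC-MAC key further into the hash context.
	 */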
2703 for (i = 0; i < len / sizeof(u32); i++) {
2704 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2705 ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2706 cpu_to_be32(aes.key_enc[i]);
2707 }
2708
2709 ctx->key_len = len;
2710 ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2711
2712 if (len == AES_KEYSIZE_192)
2713 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2714 else if (len == AES_KEYSIZE_256)
2715 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2716 else
2717 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2718
2719 memzero_explicit(&aes, sizeof(aes));
2720 return 0;
2721 }
2722
2723 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2724 {
2725 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2726
2727 safexcel_aead_cra_init(tfm);
2728 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2729 ctx->state_sz = 3 * AES_BLOCK_SIZE;
2730 ctx->xcm = EIP197_XCM_MODE_CCM;
2731 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2732 ctx->ctrinit = 0;
2733 return 0;
2734 }
2735
2736 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2737 unsigned int authsize)
2738 {
2739 /* Borrowed from crypto/ccm.c */
2740 switch (authsize) {
2741 case 4:
2742 case 6:
2743 case 8:
2744 case 10:
2745 case 12:
2746 case 14:
2747 case 16:
2748 break;
2749 default:
2750 return -EINVAL;
2751 }
2752
2753 return 0;
2754 }
2755
2756 static int safexcel_ccm_encrypt(struct aead_request *req)
2757 {
2758 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2759
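	/* iv[0] holds L' = L - 1, the CCM length field size; RFC 3610 only allows 1-7 */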
2760 if (req->iv[0] < 1 || req->iv[0] > 7)
2761 return -EINVAL;
2762
2763 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2764 }
2765
2766 static int safexcel_ccm_decrypt(struct aead_request *req)
2767 {
2768 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2769
2770 if (req->iv[0] < 1 || req->iv[0] > 7)
2771 return -EINVAL;
2772
2773 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2774 }
2775
2776 struct safexcel_alg_template safexcel_alg_ccm = {
2777 .type = SAFEXCEL_ALG_TYPE_AEAD,
2778 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2779 .alg.aead = {
2780 .setkey = safexcel_aead_ccm_setkey,
2781 .setauthsize = safexcel_aead_ccm_setauthsize,
2782 .encrypt = safexcel_ccm_encrypt,
2783 .decrypt = safexcel_ccm_decrypt,
2784 .ivsize = AES_BLOCK_SIZE,
2785 .maxauthsize = AES_BLOCK_SIZE,
2786 .base = {
2787 .cra_name = "ccm(aes)",
2788 .cra_driver_name = "safexcel-ccm-aes",
2789 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2790 .cra_flags = CRYPTO_ALG_ASYNC |
2791 CRYPTO_ALG_ALLOCATES_MEMORY |
2792 CRYPTO_ALG_KERN_DRIVER_ONLY,
2793 .cra_blocksize = 1,
2794 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2795 .cra_alignmask = 0,
2796 .cra_init = safexcel_aead_ccm_cra_init,
2797 .cra_exit = safexcel_aead_cra_exit,
2798 .cra_module = THIS_MODULE,
2799 },
2800 },
2801 };
2802
2803 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2804 const u8 *key)
2805 {
2806 struct safexcel_crypto_priv *priv = ctx->base.priv;
2807
2808 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2809 if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2810 ctx->base.needs_inv = true;
2811
2812 memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2813 ctx->key_len = CHACHA_KEY_SIZE;
2814 }
2815
2816 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2817 const u8 *key, unsigned int len)
2818 {
2819 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2820
2821 if (len != CHACHA_KEY_SIZE)
2822 return -EINVAL;
2823
2824 safexcel_chacha20_setkey(ctx, key);
2825
2826 return 0;
2827 }
2828
2829 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2830 {
2831 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2832
2833 safexcel_skcipher_cra_init(tfm);
2834 ctx->alg = SAFEXCEL_CHACHA20;
2835 ctx->ctrinit = 0;
2836 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2837 return 0;
2838 }
2839
2840 struct safexcel_alg_template safexcel_alg_chacha20 = {
2841 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2842 .algo_mask = SAFEXCEL_ALG_CHACHA20,
2843 .alg.skcipher = {
2844 .setkey = safexcel_skcipher_chacha20_setkey,
2845 .encrypt = safexcel_encrypt,
2846 .decrypt = safexcel_decrypt,
2847 .min_keysize = CHACHA_KEY_SIZE,
2848 .max_keysize = CHACHA_KEY_SIZE,
2849 .ivsize = CHACHA_IV_SIZE,
2850 .base = {
2851 .cra_name = "chacha20",
2852 .cra_driver_name = "safexcel-chacha20",
2853 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2854 .cra_flags = CRYPTO_ALG_ASYNC |
2855 CRYPTO_ALG_ALLOCATES_MEMORY |
2856 CRYPTO_ALG_KERN_DRIVER_ONLY,
2857 .cra_blocksize = 1,
2858 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2859 .cra_alignmask = 0,
2860 .cra_init = safexcel_skcipher_chacha20_cra_init,
2861 .cra_exit = safexcel_skcipher_cra_exit,
2862 .cra_module = THIS_MODULE,
2863 },
2864 },
2865 };
2866
2867 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2868 const u8 *key, unsigned int len)
2869 {
2870 struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2871
2872 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2873 len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2874 /* ESP variant has nonce appended to key */
2875 len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2876 ctx->nonce = *(u32 *)(key + len);
2877 }
2878 if (len != CHACHA_KEY_SIZE)
2879 return -EINVAL;
2880
2881 safexcel_chacha20_setkey(ctx, key);
2882
2883 return 0;
2884 }
2885
2886 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2887 unsigned int authsize)
2888 {
2889 if (authsize != POLY1305_DIGEST_SIZE)
2890 return -EINVAL;
2891 return 0;
2892 }
2893
2894 static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2895 enum safexcel_cipher_direction dir)
2896 {
2897 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2898 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2899 struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2900 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2901 struct aead_request *subreq = aead_request_ctx(req);
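	/* One extra word to hold the ESP nonce appended to the fallback key */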
2902 u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2903 int ret = 0;
2904
2905 /*
2906 * Instead of wasting time detecting umpteen silly corner cases,
2907 * just dump all "small" requests to the fallback implementation.
2908 * HW would not be faster on such small requests anyway.
2909 */
2910 if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2911 req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2912 req->cryptlen > POLY1305_DIGEST_SIZE)) {
2913 return safexcel_queue_req(&req->base, creq, dir);
2914 }
2915
2916 /* HW cannot do full (AAD+payload) zero length, use fallback */
2917 memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2918 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2919 /* ESP variant has nonce appended to the key */
2920 key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2921 ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2922 CHACHA_KEY_SIZE +
2923 EIP197_AEAD_IPSEC_NONCE_SIZE);
2924 } else {
2925 ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2926 CHACHA_KEY_SIZE);
2927 }
2928 if (ret) {
2929 crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2930 crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2931 CRYPTO_TFM_REQ_MASK);
2932 return ret;
2933 }
2934
2935 aead_request_set_tfm(subreq, ctx->fback);
2936 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2937 req->base.data);
2938 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2939 req->iv);
2940 aead_request_set_ad(subreq, req->assoclen);
2941
2942 return (dir == SAFEXCEL_ENCRYPT) ?
2943 crypto_aead_encrypt(subreq) :
2944 crypto_aead_decrypt(subreq);
2945 }
2946
2947 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2948 {
2949 return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2950 }
2951
2952 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2953 {
2954 return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2955 }
2956
2957 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2958 {
2959 struct crypto_aead *aead = __crypto_aead_cast(tfm);
2960 struct aead_alg *alg = crypto_aead_alg(aead);
2961 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2962
2963 safexcel_aead_cra_init(tfm);
2964
2965 /* Allocate fallback implementation */
2966 ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2967 CRYPTO_ALG_ASYNC |
2968 CRYPTO_ALG_NEED_FALLBACK);
2969 if (IS_ERR(ctx->fback))
2970 return PTR_ERR(ctx->fback);
2971
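	/* Request ctx must fit both our own state and an embedded fallback request */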
2972 crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2973 sizeof(struct aead_request) +
2974 crypto_aead_reqsize(ctx->fback)));
2975
2976 return 0;
2977 }
2978
2979 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2980 {
2981 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2982
2983 safexcel_aead_fallback_cra_init(tfm);
2984 ctx->alg = SAFEXCEL_CHACHA20;
2985 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2986 CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2987 ctx->ctrinit = 0;
2988 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2989 ctx->state_sz = 0; /* Precomputed by HW */
2990 return 0;
2991 }
2992
2993 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2994 {
2995 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2996
2997 crypto_free_aead(ctx->fback);
2998 safexcel_aead_cra_exit(tfm);
2999 }
3000
3001 struct safexcel_alg_template safexcel_alg_chachapoly = {
3002 .type = SAFEXCEL_ALG_TYPE_AEAD,
3003 .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3004 .alg.aead = {
3005 .setkey = safexcel_aead_chachapoly_setkey,
3006 .setauthsize = safexcel_aead_chachapoly_setauthsize,
3007 .encrypt = safexcel_aead_chachapoly_encrypt,
3008 .decrypt = safexcel_aead_chachapoly_decrypt,
3009 .ivsize = CHACHAPOLY_IV_SIZE,
3010 .maxauthsize = POLY1305_DIGEST_SIZE,
3011 .base = {
3012 .cra_name = "rfc7539(chacha20,poly1305)",
3013 .cra_driver_name = "safexcel-chacha20-poly1305",
3014 /* +1 to put it above HW chacha + SW poly */
3015 .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3016 .cra_flags = CRYPTO_ALG_ASYNC |
3017 CRYPTO_ALG_ALLOCATES_MEMORY |
3018 CRYPTO_ALG_KERN_DRIVER_ONLY |
3019 CRYPTO_ALG_NEED_FALLBACK,
3020 .cra_blocksize = 1,
3021 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3022 .cra_alignmask = 0,
3023 .cra_init = safexcel_aead_chachapoly_cra_init,
3024 .cra_exit = safexcel_aead_fallback_cra_exit,
3025 .cra_module = THIS_MODULE,
3026 },
3027 },
3028 };
3029
3030 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3031 {
3032 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3033 int ret;
3034
3035 ret = safexcel_aead_chachapoly_cra_init(tfm);
3036 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
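	/* For ESP, the IV trails the AAD and must be skipped when authenticating */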
3037 ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3038 return ret;
3039 }
3040
3041 struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
3042 .type = SAFEXCEL_ALG_TYPE_AEAD,
3043 .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3044 .alg.aead = {
3045 .setkey = safexcel_aead_chachapoly_setkey,
3046 .setauthsize = safexcel_aead_chachapoly_setauthsize,
3047 .encrypt = safexcel_aead_chachapoly_encrypt,
3048 .decrypt = safexcel_aead_chachapoly_decrypt,
3049 .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
3050 .maxauthsize = POLY1305_DIGEST_SIZE,
3051 .base = {
3052 .cra_name = "rfc7539esp(chacha20,poly1305)",
3053 .cra_driver_name = "safexcel-chacha20-poly1305-esp",
3054 /* +1 to put it above HW chacha + SW poly */
3055 .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3056 .cra_flags = CRYPTO_ALG_ASYNC |
3057 CRYPTO_ALG_ALLOCATES_MEMORY |
3058 CRYPTO_ALG_KERN_DRIVER_ONLY |
3059 CRYPTO_ALG_NEED_FALLBACK,
3060 .cra_blocksize = 1,
3061 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3062 .cra_alignmask = 0,
3063 .cra_init = safexcel_aead_chachapolyesp_cra_init,
3064 .cra_exit = safexcel_aead_fallback_cra_exit,
3065 .cra_module = THIS_MODULE,
3066 },
3067 },
3068 };
3069
3070 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3071 const u8 *key, unsigned int len)
3072 {
3073 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3074 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3075 struct safexcel_crypto_priv *priv = ctx->base.priv;
3076
3077 if (len != SM4_KEY_SIZE)
3078 return -EINVAL;
3079
3080 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3081 if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3082 ctx->base.needs_inv = true;
3083
3084 memcpy(ctx->key, key, SM4_KEY_SIZE);
3085 ctx->key_len = SM4_KEY_SIZE;
3086
3087 return 0;
3088 }
3089
3090 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3091 {
3092 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3093 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3094 return -EINVAL;
3095 else
3096 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3097 SAFEXCEL_ENCRYPT);
3098 }
3099
3100 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3101 {
3102 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3103 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3104 return -EINVAL;
3105 else
3106 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3107 SAFEXCEL_DECRYPT);
3108 }
3109
3110 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3111 {
3112 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3113
3114 safexcel_skcipher_cra_init(tfm);
3115 ctx->alg = SAFEXCEL_SM4;
3116 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3117 ctx->blocksz = 0;
3118 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3119 return 0;
3120 }
3121
3122 struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3123 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3124 .algo_mask = SAFEXCEL_ALG_SM4,
3125 .alg.skcipher = {
3126 .setkey = safexcel_skcipher_sm4_setkey,
3127 .encrypt = safexcel_sm4_blk_encrypt,
3128 .decrypt = safexcel_sm4_blk_decrypt,
3129 .min_keysize = SM4_KEY_SIZE,
3130 .max_keysize = SM4_KEY_SIZE,
3131 .base = {
3132 .cra_name = "ecb(sm4)",
3133 .cra_driver_name = "safexcel-ecb-sm4",
3134 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3135 .cra_flags = CRYPTO_ALG_ASYNC |
3136 CRYPTO_ALG_ALLOCATES_MEMORY |
3137 CRYPTO_ALG_KERN_DRIVER_ONLY,
3138 .cra_blocksize = SM4_BLOCK_SIZE,
3139 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3140 .cra_alignmask = 0,
3141 .cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3142 .cra_exit = safexcel_skcipher_cra_exit,
3143 .cra_module = THIS_MODULE,
3144 },
3145 },
3146 };
3147
3148 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3149 {
3150 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3151
3152 safexcel_skcipher_cra_init(tfm);
3153 ctx->alg = SAFEXCEL_SM4;
3154 ctx->blocksz = SM4_BLOCK_SIZE;
3155 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3156 return 0;
3157 }
3158
3159 struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3160 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3161 .algo_mask = SAFEXCEL_ALG_SM4,
3162 .alg.skcipher = {
3163 .setkey = safexcel_skcipher_sm4_setkey,
3164 .encrypt = safexcel_sm4_blk_encrypt,
3165 .decrypt = safexcel_sm4_blk_decrypt,
3166 .min_keysize = SM4_KEY_SIZE,
3167 .max_keysize = SM4_KEY_SIZE,
3168 .ivsize = SM4_BLOCK_SIZE,
3169 .base = {
3170 .cra_name = "cbc(sm4)",
3171 .cra_driver_name = "safexcel-cbc-sm4",
3172 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3173 .cra_flags = CRYPTO_ALG_ASYNC |
3174 CRYPTO_ALG_ALLOCATES_MEMORY |
3175 CRYPTO_ALG_KERN_DRIVER_ONLY,
3176 .cra_blocksize = SM4_BLOCK_SIZE,
3177 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3178 .cra_alignmask = 0,
3179 .cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3180 .cra_exit = safexcel_skcipher_cra_exit,
3181 .cra_module = THIS_MODULE,
3182 },
3183 },
3184 };
3185
3186 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3187 {
3188 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3189
3190 safexcel_skcipher_cra_init(tfm);
3191 ctx->alg = SAFEXCEL_SM4;
3192 ctx->blocksz = SM4_BLOCK_SIZE;
3193 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3194 return 0;
3195 }
3196
3197 struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3198 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3199 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3200 .alg.skcipher = {
3201 .setkey = safexcel_skcipher_sm4_setkey,
3202 .encrypt = safexcel_encrypt,
3203 .decrypt = safexcel_decrypt,
3204 .min_keysize = SM4_KEY_SIZE,
3205 .max_keysize = SM4_KEY_SIZE,
3206 .ivsize = SM4_BLOCK_SIZE,
3207 .base = {
3208 .cra_name = "ofb(sm4)",
3209 .cra_driver_name = "safexcel-ofb-sm4",
3210 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3211 .cra_flags = CRYPTO_ALG_ASYNC |
3212 CRYPTO_ALG_ALLOCATES_MEMORY |
3213 CRYPTO_ALG_KERN_DRIVER_ONLY,
3214 .cra_blocksize = 1,
3215 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3216 .cra_alignmask = 0,
3217 .cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3218 .cra_exit = safexcel_skcipher_cra_exit,
3219 .cra_module = THIS_MODULE,
3220 },
3221 },
3222 };
3223
3224 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3225 {
3226 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3227
3228 safexcel_skcipher_cra_init(tfm);
3229 ctx->alg = SAFEXCEL_SM4;
3230 ctx->blocksz = SM4_BLOCK_SIZE;
3231 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3232 return 0;
3233 }
3234
3235 struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3236 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3237 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3238 .alg.skcipher = {
3239 .setkey = safexcel_skcipher_sm4_setkey,
3240 .encrypt = safexcel_encrypt,
3241 .decrypt = safexcel_decrypt,
3242 .min_keysize = SM4_KEY_SIZE,
3243 .max_keysize = SM4_KEY_SIZE,
3244 .ivsize = SM4_BLOCK_SIZE,
3245 .base = {
3246 .cra_name = "cfb(sm4)",
3247 .cra_driver_name = "safexcel-cfb-sm4",
3248 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3249 .cra_flags = CRYPTO_ALG_ASYNC |
3250 CRYPTO_ALG_ALLOCATES_MEMORY |
3251 CRYPTO_ALG_KERN_DRIVER_ONLY,
3252 .cra_blocksize = 1,
3253 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3254 .cra_alignmask = 0,
3255 .cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3256 .cra_exit = safexcel_skcipher_cra_exit,
3257 .cra_module = THIS_MODULE,
3258 },
3259 },
3260 };
3261
3262 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3263 const u8 *key, unsigned int len)
3264 {
3265 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3266 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3267
3268 /* last 4 bytes of key are the nonce! */
3269 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3270 /* exclude the nonce here */
3271 len -= CTR_RFC3686_NONCE_SIZE;
3272
3273 return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3274 }
3275
3276 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3277 {
3278 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3279
3280 safexcel_skcipher_cra_init(tfm);
3281 ctx->alg = SAFEXCEL_SM4;
3282 ctx->blocksz = SM4_BLOCK_SIZE;
3283 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3284 return 0;
3285 }
3286
3287 struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3288 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3289 .algo_mask = SAFEXCEL_ALG_SM4,
3290 .alg.skcipher = {
3291 .setkey = safexcel_skcipher_sm4ctr_setkey,
3292 .encrypt = safexcel_encrypt,
3293 .decrypt = safexcel_decrypt,
3294 /* Add nonce size */
3295 .min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3296 .max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3297 .ivsize = CTR_RFC3686_IV_SIZE,
3298 .base = {
3299 .cra_name = "rfc3686(ctr(sm4))",
3300 .cra_driver_name = "safexcel-ctr-sm4",
3301 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3302 .cra_flags = CRYPTO_ALG_ASYNC |
3303 CRYPTO_ALG_ALLOCATES_MEMORY |
3304 CRYPTO_ALG_KERN_DRIVER_ONLY,
3305 .cra_blocksize = 1,
3306 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3307 .cra_alignmask = 0,
3308 .cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3309 .cra_exit = safexcel_skcipher_cra_exit,
3310 .cra_module = THIS_MODULE,
3311 },
3312 },
3313 };
3314
3315 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3316 {
3317 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3318 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3319 return -EINVAL;
3320
3321 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3322 SAFEXCEL_ENCRYPT);
3323 }
3324
3325 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3326 {
3327 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3328
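	/* For AEAD decrypt, cryptlen includes the auth tag; exclude it from the block size check */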
3329 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3330 if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3331 return -EINVAL;
3332
3333 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3334 SAFEXCEL_DECRYPT);
3335 }
3336
3337 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3338 {
3339 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3340
3341 safexcel_aead_cra_init(tfm);
3342 ctx->alg = SAFEXCEL_SM4;
3343 ctx->blocksz = SM4_BLOCK_SIZE;
3344 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3345 ctx->state_sz = SHA1_DIGEST_SIZE;
3346 return 0;
3347 }
3348
3349 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3350 .type = SAFEXCEL_ALG_TYPE_AEAD,
3351 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3352 .alg.aead = {
3353 .setkey = safexcel_aead_setkey,
3354 .encrypt = safexcel_aead_sm4_blk_encrypt,
3355 .decrypt = safexcel_aead_sm4_blk_decrypt,
3356 .ivsize = SM4_BLOCK_SIZE,
3357 .maxauthsize = SHA1_DIGEST_SIZE,
3358 .base = {
3359 .cra_name = "authenc(hmac(sha1),cbc(sm4))",
3360 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3361 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3362 .cra_flags = CRYPTO_ALG_ASYNC |
3363 CRYPTO_ALG_ALLOCATES_MEMORY |
3364 CRYPTO_ALG_KERN_DRIVER_ONLY,
3365 .cra_blocksize = SM4_BLOCK_SIZE,
3366 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3367 .cra_alignmask = 0,
3368 .cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3369 .cra_exit = safexcel_aead_cra_exit,
3370 .cra_module = THIS_MODULE,
3371 },
3372 },
3373 };

static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}

static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}

static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
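
/*
 * Note on the above: subreq overlays this driver's own request context,
 * which presumably was sized to also hold the fallback's request state
 * (typically done in the fallback cra_init via crypto_aead_set_reqsize());
 * the request parameters are then simply mirrored onto the software
 * cipher, so completion is reported straight to the original caller.
 */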

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot handle zero length for both AAD and payload, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/*
	 * Workaround for HW bug: EIP96 4.3 does not report blocksize
	 * errors. On decrypt, cryptlen still includes the auth tag, so
	 * strip it before checking the block alignment.
	 */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot handle zero length for both AAD and payload, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}
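
/*
 * Key layout consumed by safexcel_rfc4106_gcm_setkey() above: the raw
 * AES key with the 4-byte RFC 4106 salt appended. The driver reuses
 * CTR_RFC3686_NONCE_SIZE here purely because both nonces happen to be
 * 4 bytes wide; the value ends up as the fixed part of the GCM IV.
 */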

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}
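
/*
 * crypto_ipsec_check_assoclen() restricts the AAD to the two layouts
 * ESP actually produces: 16 bytes (SPI + 32-bit sequence number +
 * 8-byte IV) or 20 bytes (the same with a 64-bit extended sequence
 * number).
 */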

static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
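
/*
 * Usage sketch (illustrative only, not compiled into the driver): how
 * a kernel consumer might drive "rfc4106(gcm(aes))" through the
 * generic AEAD API. The key, iv, src_sg, dst_sg, assoclen and cryptlen
 * names are hypothetical; the key carries the 4-byte salt at its tail
 * as described above.
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	int err;
 *
 *	tfm = crypto_alloc_aead("rfc4106(gcm(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_aead_setkey(tfm, key, AES_KEYSIZE_128 +
 *				 CTR_RFC3686_NONCE_SIZE);
 *	err = crypto_aead_setauthsize(tfm, GHASH_DIGEST_SIZE);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  crypto_req_done, &wait);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */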

static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}

static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/*
	 * First byte of the nonce is the CCM flags value L' = L - 1,
	 * always 3 for RFC 4309 (4 byte length/counter field)
	 */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}

static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/*
	 * Borrowed from crypto/ccm.c: RFC 4309 only defines ICV lengths
	 * of 8, 12 and 16 bytes
	 */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/*
	 * Borrowed from crypto/ccm.c: the ESP AAD is either 16 bytes
	 * (SPI + 32-bit sequence number + 8-byte IV) or 20 bytes (with
	 * a 64-bit extended sequence number instead)
	 */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c, see the AAD size note above */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};