/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

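/*
 * Take a reference on an algorithm and on the module that provides it.
 * Returns NULL if the owning module is currently being unloaded.
 */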
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

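/*
 * Test larvals carry the driver name of the algorithm under test;
 * plain lookup larvals leave cra_driver_name empty.
 */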
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

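/*
 * Find the best match for @name in the algorithm list: an exact
 * cra_driver_name match wins outright, otherwise the highest-priority
 * algorithm whose cra_name matches.  A reference is taken on the
 * result via crypto_mod_get().
 */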
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

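/*
 * A larval is a temporary placeholder entered into the algorithm list
 * while the real ("adult") algorithm is being loaded or constructed;
 * lookups that race with it block on its completion.
 */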
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

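/*
 * Register a larval for @name unless a matching entry already exists.
 * If we lose the race, the duplicate larval is freed and, if the
 * existing entry is itself a larval, we wait for it to mature.
 */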
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

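/*
 * Remove a larval from the algorithm list, wake all waiters and drop
 * the list's reference on it.
 */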
void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

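/*
 * Wait up to 60 seconds for a larval to mature, then hand back a
 * reference to the adult algorithm or an appropriate error pointer.
 * The caller's reference on the larval is dropped.
 */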
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

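/*
 * Look up an algorithm, preferring instances that have passed their
 * self-tests.  If only an untested non-larval instance exists, the
 * tests must have failed, so return -ELIBBAD.
 */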
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

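/*
 * Look up @name, loading modules via the "crypto-" alias if needed.
 * If the algorithm still cannot be found, register a larval so that
 * the crypto manager can be asked to construct it.
 */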
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

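/*
 * Notify listeners (normally the crypto manager) of a probing event.
 * If nobody handles it, load cryptomgr and try once more.
 */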
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

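/*
 * Initialise the operations for a new transform, either through the
 * algorithm type's init hook or through the legacy per-type helpers.
 */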
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

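/*
 * Compute the context size for a transform, including any padding
 * needed to honour the algorithm's alignment mask.
 */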
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

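/*
 * Mark an algorithm as dying after a fatal instantiation failure so
 * that subsequent lookups skip it.
 */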
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

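/*
 * Allocate and initialise a transform for @alg in a single contiguous
 * allocation.  On -EAGAIN the algorithm is shot down so that callers
 * retrying the allocation can pick a replacement.
 */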
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
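
/*
 * Example usage (a minimal sketch; assumes the "sha1" algorithm is
 * available and that the transform is released with crypto_free_tfm()):
 *
 *	struct crypto_tfm *tfm = crypto_alloc_base("sha1", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */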

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

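/*
 * Look up an algorithm on behalf of a frontend type, folding the
 * frontend's type and mask constraints into the caller's.
 */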
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

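/*
 * Return true if an algorithm matching @name, @type and @mask can be
 * found or loaded, without keeping a reference to it.
 */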
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
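
/*
 * Typical usage with the crypto_wait helpers (a sketch; @req here is
 * assumed to be an already set up skcipher request):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */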

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");