// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/aead.h>
#include <linux/cryptouser.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "internal.h"

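/*
 * Return the per-algorithm statistics block when CONFIG_CRYPTO_STATS is
 * enabled, or NULL otherwise.  Callers only dereference the result inside
 * IS_ENABLED(CONFIG_CRYPTO_STATS) sections, so the NULL case is never touched.
 */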
static inline struct crypto_istat_aead *aead_get_stat(struct aead_alg *alg)
{
#ifdef CONFIG_CRYPTO_STATS
	return &alg->stat;
#else
	return NULL;
#endif
}

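/*
 * Handle a key that does not satisfy the algorithm's alignment mask: copy it
 * into a suitably aligned temporary buffer, run the algorithm's ->setkey()
 * on the copy, then zero and free the buffer so no key material is left
 * behind.
 */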
static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	unsigned long alignmask = crypto_aead_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = crypto_aead_alg(tfm)->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}

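/*
 * Set the key for an AEAD transform.  Unaligned keys are bounced through
 * setkey_unaligned().  On failure the tfm is flagged CRYPTO_TFM_NEED_KEY so
 * that subsequent encrypt/decrypt calls fail with -ENOKEY until a valid key
 * has been installed.
 */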
int crypto_aead_setkey(struct crypto_aead *tfm,
		       const u8 *key, unsigned int keylen)
{
	unsigned long alignmask = crypto_aead_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = setkey_unaligned(tfm, key, keylen);
	else
		err = crypto_aead_alg(tfm)->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		crypto_aead_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
		return err;
	}

	crypto_aead_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setkey);

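/*
 * Set the authentication tag length.  The value must not exceed the
 * algorithm's maxauthsize, and a zero tag is only accepted when the
 * algorithm itself advertises a zero maxauthsize.  Algorithms may veto or
 * adjust for the new size via their optional ->setauthsize() hook.
 */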
int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	int err;

	if ((!authsize && crypto_aead_maxauthsize(tfm)) ||
	    authsize > crypto_aead_maxauthsize(tfm))
		return -EINVAL;

	if (crypto_aead_alg(tfm)->setauthsize) {
		err = crypto_aead_alg(tfm)->setauthsize(tfm, authsize);
		if (err)
			return err;
	}

	tfm->authsize = authsize;
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);

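/*
 * Fold an operation's return value into the error counter.  -EINPROGRESS
 * and -EBUSY merely indicate that an asynchronous request has been queued,
 * so they are not counted as errors.
 */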
static inline int crypto_aead_errstat(struct crypto_istat_aead *istat, int err)
{
	if (!IS_ENABLED(CONFIG_CRYPTO_STATS))
		return err;

	if (err && err != -EINPROGRESS && err != -EBUSY)
		atomic64_inc(&istat->err_cnt);

	return err;
}

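/*
 * Encrypt an AEAD request.  Fails with -ENOKEY if no key has been set.
 * Statistics are bumped before the operation and the final status is
 * folded in via crypto_aead_errstat().
 */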
int crypto_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct crypto_istat_aead *istat;
	int ret;

	istat = aead_get_stat(alg);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		atomic64_inc(&istat->encrypt_cnt);
		atomic64_add(req->cryptlen, &istat->encrypt_tlen);
	}

	if (crypto_aead_get_flags(aead) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else
		ret = alg->encrypt(req);

	return crypto_aead_errstat(istat, ret);
}
EXPORT_SYMBOL_GPL(crypto_aead_encrypt);

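/*
 * Decrypt an AEAD request.  Fails with -ENOKEY if no key has been set and
 * with -EINVAL if the ciphertext is shorter than the authentication tag,
 * since such input cannot possibly verify.
 */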
int crypto_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct crypto_istat_aead *istat;
	int ret;

	istat = aead_get_stat(alg);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		atomic64_inc(&istat->decrypt_cnt);
		atomic64_add(req->cryptlen, &istat->decrypt_tlen);
	}

	if (crypto_aead_get_flags(aead) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else if (req->cryptlen < crypto_aead_authsize(aead))
		ret = -EINVAL;
	else
		ret = alg->decrypt(req);

	return crypto_aead_errstat(istat, ret);
}
EXPORT_SYMBOL_GPL(crypto_aead_decrypt);

static void crypto_aead_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);

	alg->exit(aead);
}

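/*
 * Initialise a freshly allocated AEAD transform: it starts in the
 * CRYPTO_TFM_NEED_KEY state, its tag length defaults to the algorithm's
 * maxauthsize, and the algorithm's optional ->init()/->exit() hooks are
 * wired up.
 */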
static int crypto_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);

	crypto_aead_set_flags(aead, CRYPTO_TFM_NEED_KEY);

	aead->authsize = alg->maxauthsize;

	if (alg->exit)
		aead->base.exit = crypto_aead_exit_tfm;

	if (alg->init)
		return alg->init(aead);

	return 0;
}

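/* Describe the algorithm to user space via the crypto_user netlink interface. */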
static int __maybe_unused crypto_aead_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_aead raead;
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);

	memset(&raead, 0, sizeof(raead));

	strscpy(raead.type, "aead", sizeof(raead.type));
	strscpy(raead.geniv, "<none>", sizeof(raead.geniv));

	raead.blocksize = alg->cra_blocksize;
	raead.maxauthsize = aead->maxauthsize;
	raead.ivsize = aead->ivsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_AEAD, sizeof(raead), &raead);
}

static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);

	seq_printf(m, "type         : aead\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
	seq_printf(m, "geniv        : <none>\n");
}

static void crypto_aead_free_instance(struct crypto_instance *inst)
{
	struct aead_instance *aead = aead_instance(inst);

	aead->free(aead);
}

static int __maybe_unused crypto_aead_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);
	struct crypto_istat_aead *istat = aead_get_stat(aead);
	struct crypto_stat_aead raead;

	memset(&raead, 0, sizeof(raead));

	strscpy(raead.type, "aead", sizeof(raead.type));

	raead.stat_encrypt_cnt = atomic64_read(&istat->encrypt_cnt);
	raead.stat_encrypt_tlen = atomic64_read(&istat->encrypt_tlen);
	raead.stat_decrypt_cnt = atomic64_read(&istat->decrypt_cnt);
	raead.stat_decrypt_tlen = atomic64_read(&istat->decrypt_tlen);
	raead.stat_err_cnt = atomic64_read(&istat->err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead);
}

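/*
 * The crypto_type glue that plugs AEAD algorithms into the generic crypto
 * API: transform construction, /proc/crypto output and netlink reporting.
 */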
static const struct crypto_type crypto_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
	.free = crypto_aead_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_aead_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_aead_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_aead_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};

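/*
 * Take a reference on an AEAD algorithm for use inside a template instance,
 * so that the instance can later allocate transforms of it.
 */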
int crypto_grab_aead(struct crypto_aead_spawn *spawn,
		     struct crypto_instance *inst,
		     const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_aead_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);

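/*
 * Allocate an AEAD transform by algorithm name, e.g. "gcm(aes)".
 *
 * A minimal synchronous usage sketch (error handling and scatterlist setup
 * omitted; "gcm(aes)" and the 16-byte tag are only illustrative):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, 16);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  crypto_req_done, &wait);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *	crypto_wait_req(crypto_aead_encrypt(req), &wait);
 *
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */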
struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_aead_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_aead);

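/*
 * Common validation and setup shared by algorithm and instance registration:
 * reject implausibly large size parameters, default the chunk size to the
 * block size, mark the algorithm as an AEAD and clear its statistics.
 */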
static int aead_prepare_alg(struct aead_alg *alg)
{
	struct crypto_istat_aead *istat = aead_get_stat(alg);
	struct crypto_alg *base = &alg->base;

	if (max3(alg->maxauthsize, alg->ivsize, alg->chunksize) >
	    PAGE_SIZE / 8)
		return -EINVAL;

	if (!alg->chunksize)
		alg->chunksize = base->cra_blocksize;

	base->cra_type = &crypto_aead_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AEAD;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		memset(istat, 0, sizeof(*istat));

	return 0;
}

int crypto_register_aead(struct aead_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = aead_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_aead);

void crypto_unregister_aead(struct aead_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aead);

int crypto_register_aeads(struct aead_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_aead(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_aead(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_aeads);

void crypto_unregister_aeads(struct aead_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_aead(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aeads);

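/*
 * Register an AEAD instance created by a template.  Instances must provide
 * a ->free() callback, which crypto_aead_free_instance() relies on.
 */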
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = aead_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, aead_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(aead_register_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");