/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/compiler.h>

struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};

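/*
 * Set the key on the underlying block cipher, propagating the request flags
 * from the skcipher down to the child and any resulting flags back up to the
 * parent transform.
 */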
static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}

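/*
 * Encrypt one walk segment with distinct source and destination buffers.
 * PCBC chains both plaintext and ciphertext: each block is encrypted as
 * C[n] = E(P[n] ^ P[n-1] ^ C[n-1]), with the IV standing in for the
 * P ^ C chain value on the first block.  The running chain value lives in
 * walk->iv, so it carries over to the next segment automatically.
 */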
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

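/*
 * Encrypt one walk segment in place (source and destination overlap).  The
 * plaintext block is saved in tmpbuf before it is overwritten by the
 * ciphertext, so the P ^ C chain value can still be computed afterwards.
 */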
static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		crypto_xor_cpy(iv, tmpbuf, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

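/*
 * Top-level encrypt: walk the request's scatterlists a segment at a time and
 * dispatch each segment to the in-place or out-of-place helper.  Each helper
 * returns the number of bytes left unprocessed (the sub-blocksize tail),
 * which is handed back to skcipher_walk_done().
 */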
static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

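/*
 * Decrypt one walk segment with distinct source and destination buffers.
 * This inverts the encryption recurrence: P[n] = D(C[n]) ^ P[n-1] ^ C[n-1],
 * with the IV again standing in for the chain value on the first block.
 */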
static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

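/*
 * Decrypt one walk segment in place.  The ciphertext block is saved in an
 * aligned temporary buffer before being overwritten by the plaintext, so the
 * P ^ C chain value can still be formed afterwards.
 */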
static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(u32));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		crypto_xor_cpy(iv, src, tmpbuf, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

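/*
 * Top-level decrypt: the mirror image of crypto_pcbc_encrypt(), walking the
 * request and dispatching to the in-place or out-of-place decrypt helper.
 */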
static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

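/*
 * Allocate the underlying block cipher from the spawn recorded in the
 * instance when a pcbc(...) transform is created; the exit handler frees it
 * again when the transform is destroyed.
 */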
static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static void crypto_pcbc_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

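/*
 * Template ->create() callback: build a "pcbc(<cipher>)" skcipher instance
 * around the requested single-block cipher.  The IV size equals the cipher
 * block size, and the key size limits are inherited from the cipher.
 */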
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_spawn *spawn;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
	    ~CRYPTO_ALG_INTERNAL)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
				      (algt->type & CRYPTO_ALG_INTERNAL),
				  CRYPTO_ALG_TYPE_MASK |
				  (algt->mask & CRYPTO_ALG_INTERNAL));
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		goto err_free_inst;

	spawn = skcipher_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	crypto_mod_put(alg);
	if (err)
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
	if (err)
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_alignmask = alg->cra_alignmask;

	inst->alg.ivsize = alg->cra_blocksize;
	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.init = crypto_pcbc_init_tfm;
	inst->alg.exit = crypto_pcbc_exit_tfm;

	inst->alg.setkey = crypto_pcbc_setkey;
	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	inst->free = crypto_pcbc_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");