// SPDX-License-Identifier: GPL-2.0
/*
 * Speck: a lightweight block cipher
 *
 * Copyright (c) 2018 Google, Inc
 *
 * Speck has 10 variants, including 5 block sizes.  For now we only implement
 * the variants Speck128/128, Speck128/192, Speck128/256, Speck64/96, and
 * Speck64/128.  Speck${B}/${K} denotes the variant with a block size of B bits
 * and a key size of K bits.  The Speck128 variants are believed to be the most
 * secure variants, and they use the same block size and key sizes as AES.  The
 * Speck64 variants are less secure, but they are usually faster on 32-bit
 * processors.  The remaining variants (Speck32, Speck48, and Speck96) are even
 * less secure and/or not as well suited for implementation on either 32-bit or
 * 64-bit processors, so they are omitted.
 *
 * Reference: "The Simon and Speck Families of Lightweight Block Ciphers"
 * https://eprint.iacr.org/2013/404.pdf
 *
 * In correspondence, the Speck designers have also clarified that the words
 * should be interpreted in little-endian format, that the words should be
 * ordered such that the first word of each block is 'y' rather than 'x', and
 * that the first key word (rather than the last) becomes the first round key.
 */
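
/*
 * Each Speck round transforms the two-word block (x, y) with a round key
 * k as follows (implemented by the *_round() helpers below):
 *
 *	x = (x ror 8) + y;
 *	x ^= k;
 *	y = (y rol 3) ^ x;
 *
 * Decryption (the *_unround() helpers) applies the inverse steps in
 * reverse order.  The same round function, keyed with the round index,
 * is also reused by the *_setkey() functions to expand the key.
 */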

#include <asm/unaligned.h>
#include <crypto/speck.h>
#include <linux/bitops.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>

/* Speck128 */

static __always_inline void speck128_round(u64 *x, u64 *y, u64 k)
{
	*x = ror64(*x, 8);
	*x += *y;
	*x ^= k;
	*y = rol64(*y, 3);
	*y ^= *x;
}

static __always_inline void speck128_unround(u64 *x, u64 *y, u64 k)
{
	*y ^= *x;
	*y = ror64(*y, 3);
	*x ^= k;
	*x -= *y;
	*x = rol64(*x, 8);
}

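/*
 * The crypto_speck128_*() functions are exported so that architecture-
 * specific Speck implementations can reuse the generic code, e.g. for
 * key expansion or as a fallback for single-block operations.
 */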
void crypto_speck128_encrypt(const struct speck128_tfm_ctx *ctx,
			     u8 *out, const u8 *in)
{
	u64 y = get_unaligned_le64(in);
	u64 x = get_unaligned_le64(in + 8);
	int i;

	for (i = 0; i < ctx->nrounds; i++)
		speck128_round(&x, &y, ctx->round_keys[i]);

	put_unaligned_le64(y, out);
	put_unaligned_le64(x, out + 8);
}
EXPORT_SYMBOL_GPL(crypto_speck128_encrypt);

static void speck128_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	crypto_speck128_encrypt(crypto_tfm_ctx(tfm), out, in);
}

void crypto_speck128_decrypt(const struct speck128_tfm_ctx *ctx,
			     u8 *out, const u8 *in)
{
	u64 y = get_unaligned_le64(in);
	u64 x = get_unaligned_le64(in + 8);
	int i;

	for (i = ctx->nrounds - 1; i >= 0; i--)
		speck128_unround(&x, &y, ctx->round_keys[i]);

	put_unaligned_le64(y, out);
	put_unaligned_le64(x, out + 8);
}
EXPORT_SYMBOL_GPL(crypto_speck128_decrypt);

static void speck128_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	crypto_speck128_decrypt(crypto_tfm_ctx(tfm), out, in);
}

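/*
 * Expand 'key' into ctx->round_keys[].  'k' is the running round key and
 * l[] holds the remaining key words: each iteration saves k as a round
 * key, then advances the schedule by one Speck round keyed with the
 * round index i.  A key of m words cycles through m - 1 'l' words.
 */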
int crypto_speck128_setkey(struct speck128_tfm_ctx *ctx, const u8 *key,
			   unsigned int keylen)
{
	u64 l[3];
	u64 k;
	int i;

	switch (keylen) {
	case SPECK128_128_KEY_SIZE:
		k = get_unaligned_le64(key);
		l[0] = get_unaligned_le64(key + 8);
		ctx->nrounds = SPECK128_128_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck128_round(&l[0], &k, i);
		}
		break;
	case SPECK128_192_KEY_SIZE:
		k = get_unaligned_le64(key);
		l[0] = get_unaligned_le64(key + 8);
		l[1] = get_unaligned_le64(key + 16);
		ctx->nrounds = SPECK128_192_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck128_round(&l[i % 2], &k, i);
		}
		break;
	case SPECK128_256_KEY_SIZE:
		k = get_unaligned_le64(key);
		l[0] = get_unaligned_le64(key + 8);
		l[1] = get_unaligned_le64(key + 16);
		l[2] = get_unaligned_le64(key + 24);
		ctx->nrounds = SPECK128_256_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck128_round(&l[i % 3], &k, i);
		}
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_speck128_setkey);

static int speck128_setkey(struct crypto_tfm *tfm, const u8 *key,
			   unsigned int keylen)
{
	return crypto_speck128_setkey(crypto_tfm_ctx(tfm), key, keylen);
}

/* Speck64 */

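/*
 * The Speck64 code below mirrors the Speck128 code above, but operates
 * on 32-bit words.
 */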
static __always_inline void speck64_round(u32 *x, u32 *y, u32 k)
{
	*x = ror32(*x, 8);
	*x += *y;
	*x ^= k;
	*y = rol32(*y, 3);
	*y ^= *x;
}

static __always_inline void speck64_unround(u32 *x, u32 *y, u32 k)
{
	*y ^= *x;
	*y = ror32(*y, 3);
	*x ^= k;
	*x -= *y;
	*x = rol32(*x, 8);
}

void crypto_speck64_encrypt(const struct speck64_tfm_ctx *ctx,
			    u8 *out, const u8 *in)
{
	u32 y = get_unaligned_le32(in);
	u32 x = get_unaligned_le32(in + 4);
	int i;

	for (i = 0; i < ctx->nrounds; i++)
		speck64_round(&x, &y, ctx->round_keys[i]);

	put_unaligned_le32(y, out);
	put_unaligned_le32(x, out + 4);
}
EXPORT_SYMBOL_GPL(crypto_speck64_encrypt);

static void speck64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	crypto_speck64_encrypt(crypto_tfm_ctx(tfm), out, in);
}

void crypto_speck64_decrypt(const struct speck64_tfm_ctx *ctx,
			    u8 *out, const u8 *in)
{
	u32 y = get_unaligned_le32(in);
	u32 x = get_unaligned_le32(in + 4);
	int i;

	for (i = ctx->nrounds - 1; i >= 0; i--)
		speck64_unround(&x, &y, ctx->round_keys[i]);

	put_unaligned_le32(y, out);
	put_unaligned_le32(x, out + 4);
}
EXPORT_SYMBOL_GPL(crypto_speck64_decrypt);

static void speck64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	crypto_speck64_decrypt(crypto_tfm_ctx(tfm), out, in);
}

int crypto_speck64_setkey(struct speck64_tfm_ctx *ctx, const u8 *key,
			  unsigned int keylen)
{
	u32 l[3];
	u32 k;
	int i;

	switch (keylen) {
	case SPECK64_96_KEY_SIZE:
		k = get_unaligned_le32(key);
		l[0] = get_unaligned_le32(key + 4);
		l[1] = get_unaligned_le32(key + 8);
		ctx->nrounds = SPECK64_96_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck64_round(&l[i % 2], &k, i);
		}
		break;
	case SPECK64_128_KEY_SIZE:
		k = get_unaligned_le32(key);
		l[0] = get_unaligned_le32(key + 4);
		l[1] = get_unaligned_le32(key + 8);
		l[2] = get_unaligned_le32(key + 12);
		ctx->nrounds = SPECK64_128_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck64_round(&l[i % 3], &k, i);
		}
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_speck64_setkey);

static int speck64_setkey(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen)
{
	return crypto_speck64_setkey(crypto_tfm_ctx(tfm), key, keylen);
}

/* Algorithm definitions */

static struct crypto_alg speck_algs[] = {
	{
		.cra_name		= "speck128",
		.cra_driver_name	= "speck128-generic",
		.cra_priority		= 100,
		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize		= SPECK128_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct speck128_tfm_ctx),
		.cra_module		= THIS_MODULE,
		.cra_u			= {
			.cipher = {
				.cia_min_keysize	= SPECK128_128_KEY_SIZE,
				.cia_max_keysize	= SPECK128_256_KEY_SIZE,
				.cia_setkey		= speck128_setkey,
				.cia_encrypt		= speck128_encrypt,
				.cia_decrypt		= speck128_decrypt
			}
		}
	}, {
		.cra_name		= "speck64",
		.cra_driver_name	= "speck64-generic",
		.cra_priority		= 100,
		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize		= SPECK64_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct speck64_tfm_ctx),
		.cra_module		= THIS_MODULE,
		.cra_u			= {
			.cipher = {
				.cia_min_keysize	= SPECK64_96_KEY_SIZE,
				.cia_max_keysize	= SPECK64_128_KEY_SIZE,
				.cia_setkey		= speck64_setkey,
				.cia_encrypt		= speck64_encrypt,
				.cia_decrypt		= speck64_decrypt
			}
		}
	}
};
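
/*
 * A minimal usage sketch (not part of this module): kernel consumers go
 * through the crypto API rather than calling the functions above
 * directly.  Error handling is elided and the key contents below are
 * hypothetical.
 *
 *	struct crypto_cipher *tfm;
 *	u8 key[SPECK128_256_KEY_SIZE] = { ... };
 *	u8 block[SPECK128_BLOCK_SIZE];
 *
 *	tfm = crypto_alloc_cipher("speck128", 0, 0);
 *	crypto_cipher_setkey(tfm, key, sizeof(key));
 *	crypto_cipher_encrypt_one(tfm, block, block);
 *	crypto_free_cipher(tfm);
 */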

static int __init speck_module_init(void)
{
	return crypto_register_algs(speck_algs, ARRAY_SIZE(speck_algs));
}

static void __exit speck_module_exit(void)
{
	crypto_unregister_algs(speck_algs, ARRAY_SIZE(speck_algs));
}

module_init(speck_module_init);
module_exit(speck_module_exit);

MODULE_DESCRIPTION("Speck block cipher (generic)");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("speck128");
MODULE_ALIAS_CRYPTO("speck128-generic");
MODULE_ALIAS_CRYPTO("speck64");
MODULE_ALIAS_CRYPTO("speck64-generic");