/**
 * GHASH routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/ghash.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <crypto/b128ops.h>

#define IN_INTERRUPT in_interrupt()

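/*
 * GHASH primitives provided outside this file by the module's VSX
 * implementation: gcm_init_p8() derives the hash-key table from the key,
 * gcm_gmult_p8() multiplies Xi by H, and gcm_ghash_p8() folds 'len' bytes
 * of input into Xi. All of them require the vector/VSX unit to be enabled.
 */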
void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
		  const u8 *in, size_t len);

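/* Per-tfm context: the precomputed hash-key table and the software fallback. */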
struct p8_ghash_ctx {
	u128 htable[16];
	struct crypto_shash *fallback;
};

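/*
 * Per-request state: the running digest, a buffer for a partial block and
 * the descriptor of the fallback tfm. The fallback descriptor must remain
 * the last member, since the fallback's own context is stored in the
 * descriptor space reserved right behind this structure.
 */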
struct p8_ghash_desc_ctx {
	u64 shash[2];
	u8 buffer[GHASH_DIGEST_SIZE];
	int bytes;
	struct shash_desc fallback_desc;
};

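/*
 * Allocate the ghash-generic fallback used whenever the vector unit cannot
 * be used (e.g. in interrupt context) and check that the descriptor size
 * reserved by this algorithm is large enough for it.
 */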
static int p8_ghash_init_tfm(struct crypto_tfm *tfm)
{
	const char *alg = "ghash-generic";
	struct crypto_shash *fallback;
	struct crypto_shash *shash_tfm = __crypto_shash_cast(tfm);
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_shash_set_flags(fallback,
			       crypto_shash_get_flags((struct crypto_shash
						       *) tfm));

	/* Check if the descsize defined in the algorithm is still enough. */
	if (shash_tfm->descsize < sizeof(struct p8_ghash_desc_ctx)
	    + crypto_shash_descsize(fallback)) {
		printk(KERN_ERR
		       "Desc size of the fallback implementation (%s) does not match the expected value: %lu vs %u\n",
		       alg,
		       shash_tfm->descsize - sizeof(struct p8_ghash_desc_ctx),
		       crypto_shash_descsize(fallback));
		crypto_free_shash(fallback);
		return -EINVAL;
	}
	ctx->fallback = fallback;

	return 0;
}

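/* Release the fallback tfm allocated in p8_ghash_init_tfm(). */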
static void p8_ghash_exit_tfm(struct crypto_tfm *tfm)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_shash(ctx->fallback);
		ctx->fallback = NULL;
	}
}

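/* Reset the per-request state and initialize the fallback descriptor. */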
static int p8_ghash_init(struct shash_desc *desc)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	dctx->bytes = 0;
	memset(dctx->shash, 0, GHASH_DIGEST_SIZE);
	dctx->fallback_desc.tfm = ctx->fallback;
	dctx->fallback_desc.flags = desc->flags;
	return crypto_shash_init(&dctx->fallback_desc);
}

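/*
 * Derive the hash-key table from the 16-byte GHASH key with the vector
 * unit enabled, and key the fallback with the same value so both paths
 * compute the same digest.
 */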
static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	gcm_init_p8(ctx->htable, (const u64 *) key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();
	return crypto_shash_setkey(ctx->fallback, key, keylen);
}

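/*
 * In interrupt context the vector unit cannot be used, so the whole update
 * is handed to the fallback. Otherwise, complete any buffered partial
 * block, hash as many full blocks as possible with the VSX routine and
 * buffer the remainder for the next call.
 */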
static int p8_ghash_update(struct shash_desc *desc,
			   const u8 *src, unsigned int srclen)
{
	unsigned int len;
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	if (IN_INTERRUPT) {
		return crypto_shash_update(&dctx->fallback_desc, src,
					   srclen);
	} else {
		if (dctx->bytes) {
			if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
				memcpy(dctx->buffer + dctx->bytes, src,
				       srclen);
				dctx->bytes += srclen;
				return 0;
			}
			memcpy(dctx->buffer + dctx->bytes, src,
			       GHASH_DIGEST_SIZE - dctx->bytes);
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			gcm_ghash_p8(dctx->shash, ctx->htable,
				     dctx->buffer, GHASH_DIGEST_SIZE);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();
			src += GHASH_DIGEST_SIZE - dctx->bytes;
			srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
			dctx->bytes = 0;
		}
		len = srclen & ~(GHASH_DIGEST_SIZE - 1);
		if (len) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();
			src += len;
			srclen -= len;
		}
		if (srclen) {
			memcpy(dctx->buffer, src, srclen);
			dctx->bytes = srclen;
		}
		return 0;
	}
}

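/*
 * Finish the digest: in interrupt context defer to the fallback; otherwise
 * zero-pad and hash any buffered partial block and copy out the result.
 */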
static int p8_ghash_final(struct shash_desc *desc, u8 *out)
{
	int i;
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	if (IN_INTERRUPT) {
		return crypto_shash_final(&dctx->fallback_desc, out);
	} else {
		if (dctx->bytes) {
			for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
				dctx->buffer[i] = 0;
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			gcm_ghash_p8(dctx->shash, ctx->htable,
				     dctx->buffer, GHASH_DIGEST_SIZE);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();
			dctx->bytes = 0;
		}
		memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
		return 0;
	}
}

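/*
 * Registered with a high cra_priority so this implementation is preferred
 * over the generic GHASH when the module is available, while the
 * NEED_FALLBACK flag keeps ghash-generic around for the fallback path.
 */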
struct shash_alg p8_ghash_alg = {
	.digestsize = GHASH_DIGEST_SIZE,
	.init = p8_ghash_init,
	.update = p8_ghash_update,
	.final = p8_ghash_final,
	.setkey = p8_ghash_setkey,
	.descsize = sizeof(struct p8_ghash_desc_ctx)
		+ sizeof(struct ghash_desc_ctx),
	.base = {
		 .cra_name = "ghash",
		 .cra_driver_name = "p8_ghash",
		 .cra_priority = 1000,
		 .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
		 .cra_blocksize = GHASH_BLOCK_SIZE,
		 .cra_ctxsize = sizeof(struct p8_ghash_ctx),
		 .cra_module = THIS_MODULE,
		 .cra_init = p8_ghash_init_tfm,
		 .cra_exit = p8_ghash_exit_tfm,
	},
};