/* SPDX-License-Identifier: GPL-2.0-or-later */
/**
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This is the header file for the AMCC crypto offload Linux device driver
 * for use with the Linux CryptoAPI.
 */

#ifndef __CRYPTO4XX_CORE_H__
#define __CRYPTO4XX_CORE_H__

#include <linux/ratelimit.h>
#include <linux/mutex.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/rng.h>
#include <crypto/internal/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_sa.h"

#define PPC460SX_SDR0_SRST		0x201
#define PPC405EX_SDR0_SRST		0x200
#define PPC460EX_SDR0_SRST		0x201
#define PPC460EX_CE_RESET		0x08000000
#define PPC460SX_CE_RESET		0x20000000
#define PPC405EX_CE_RESET		0x00000008

#define CRYPTO4XX_CRYPTO_PRIORITY	300
#define PPC4XX_NUM_PD			256
#define PPC4XX_LAST_PD			(PPC4XX_NUM_PD - 1)
#define PPC4XX_NUM_GD			1024
#define PPC4XX_LAST_GD			(PPC4XX_NUM_GD - 1)
#define PPC4XX_NUM_SD			256
#define PPC4XX_LAST_SD			(PPC4XX_NUM_SD - 1)
#define PPC4XX_SD_BUFFER_SIZE		2048

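/*
 * Per-packet descriptor states kept in struct pd_uinfo::state, plus the
 * sentinel value the descriptor ring "get" helpers return when the ring
 * has no free entry.
 */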
#define PD_ENTRY_BUSY			BIT(1)
#define PD_ENTRY_INUSE			BIT(0)
#define PD_ENTRY_FREE			0
#define ERING_WAS_FULL			0xffffffff

struct crypto4xx_device;

union shadow_sa_buf {
	struct dynamic_sa_ctl sa;

	/* allocate 256 bytes, which is enough for any kind of dynamic SA */
	u8 buf[256];
} __packed;

struct pd_uinfo {
	struct crypto4xx_device *dev;
	u32 state;
	u32 first_gd;	/* first gather descriptor
			   used by this packet */
	u32 num_gd;	/* number of gather descriptors
			   used by this packet */
	u32 first_sd;	/* first scatter descriptor
			   used by this packet */
	u32 num_sd;	/* number of scatter descriptors
			   used by this packet */
	struct dynamic_sa_ctl *sa_va;	/* shadow sa */
	struct sa_state_record *sr_va;	/* state record for shadow sa */
	u32 sr_pa;
	struct scatterlist *dest_va;
	struct crypto_async_request *async_req;	/* base crypto request
						   for this packet */
};

struct crypto4xx_device {
	struct crypto4xx_core_device *core_dev;
	void __iomem *ce_base;
	void __iomem *trng_base;

	struct ce_pd *pdr;	/* base address of packet descriptor ring */
	dma_addr_t pdr_pa;	/* physical address of pdr_base_register */
	struct ce_gd *gdr;	/* gather descriptor ring */
	dma_addr_t gdr_pa;	/* physical address of gdr_base_register */
	struct ce_sd *sdr;	/* scatter descriptor ring */
	dma_addr_t sdr_pa;	/* physical address of sdr_base_register */
	void *scatter_buffer_va;
	dma_addr_t scatter_buffer_pa;

	union shadow_sa_buf *shadow_sa_pool;
	dma_addr_t shadow_sa_pool_pa;
	struct sa_state_record *shadow_sr_pool;
	dma_addr_t shadow_sr_pool_pa;
	u32 pdr_tail;
	u32 pdr_head;
	u32 gdr_tail;
	u32 gdr_head;
	u32 sdr_tail;
	u32 sdr_head;
	struct pd_uinfo *pdr_uinfo;
	struct list_head alg_list;	/* list of algorithms supported
					   by this device */
	struct ratelimit_state aead_ratelimit;
	bool is_revb;
};

struct crypto4xx_core_device {
	struct device *device;
	struct platform_device *ofdev;
	struct crypto4xx_device *dev;
	struct hwrng *trng;
	u32 int_status;
	u32 irq;
	struct tasklet_struct tasklet;
	spinlock_t lock;
	struct mutex rng_lock;
};

struct crypto4xx_ctx {
	struct crypto4xx_device *dev;
	struct dynamic_sa_ctl *sa_in;
	struct dynamic_sa_ctl *sa_out;
	__le32 iv_nonce;
	u32 sa_len;
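	/*
	 * Software fallback transform, used when a request cannot be
	 * handled directly by the packet engine.
	 */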
	union {
		struct crypto_sync_skcipher *cipher;
		struct crypto_aead *aead;
	} sw_cipher;
};

struct crypto4xx_aead_reqctx {
	struct scatterlist dst[2];
};

struct crypto4xx_alg_common {
	u32 type;
	union {
		struct skcipher_alg cipher;
		struct ahash_alg hash;
		struct aead_alg aead;
		struct rng_alg rng;
	} u;
};

struct crypto4xx_alg {
	struct list_head entry;
	struct crypto4xx_alg_common alg;
	struct crypto4xx_device *dev;
};

int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size);
void crypto4xx_free_sa(struct crypto4xx_ctx *ctx);
void crypto4xx_free_ctx(struct crypto4xx_ctx *ctx);
int crypto4xx_build_pd(struct crypto_async_request *req,
		       struct crypto4xx_ctx *ctx,
		       struct scatterlist *src,
		       struct scatterlist *dst,
		       const unsigned int datalen,
		       const __le32 *iv, const u32 iv_len,
		       const struct dynamic_sa_ctl *sa,
		       const unsigned int sa_len,
		       const unsigned int assoclen,
		       struct scatterlist *dst_tmp);
int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_encrypt_ctr(struct skcipher_request *req);
int crypto4xx_decrypt_ctr(struct skcipher_request *req);
int crypto4xx_encrypt_iv_stream(struct skcipher_request *req);
int crypto4xx_decrypt_iv_stream(struct skcipher_request *req);
int crypto4xx_encrypt_iv_block(struct skcipher_request *req);
int crypto4xx_decrypt_iv_block(struct skcipher_request *req);
int crypto4xx_encrypt_noiv_block(struct skcipher_request *req);
int crypto4xx_decrypt_noiv_block(struct skcipher_request *req);
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req);
int crypto4xx_rfc3686_decrypt(struct skcipher_request *req);
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm);
int crypto4xx_hash_digest(struct ahash_request *req);
int crypto4xx_hash_final(struct ahash_request *req);
int crypto4xx_hash_update(struct ahash_request *req);
int crypto4xx_hash_init(struct ahash_request *req);

/**
 * Note: Only use this function to copy items that are word aligned.
 */
static inline void crypto4xx_memcpy_swab32(u32 *dst, const void *buf,
					   size_t len)
{
	for (; len >= 4; buf += 4, len -= 4)
		*dst++ = __swab32p((u32 *) buf);

	if (len) {
		const u8 *tmp = (u8 *)buf;

		switch (len) {
		case 3:
			*dst = (tmp[2] << 16) |
			       (tmp[1] << 8) |
			       tmp[0];
			break;
		case 2:
			*dst = (tmp[1] << 8) |
			       tmp[0];
			break;
		case 1:
			*dst = tmp[0];
			break;
		default:
			break;
		}
	}
}

static inline void crypto4xx_memcpy_from_le32(u32 *dst, const void *buf,
					      size_t len)
{
	crypto4xx_memcpy_swab32(dst, buf, len);
}

static inline void crypto4xx_memcpy_to_le32(__le32 *dst, const void *buf,
					    size_t len)
{
	crypto4xx_memcpy_swab32((u32 *)dst, buf, len);
}
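
/*
 * Illustrative example (hypothetical names, not declarations from this
 * header): key material handed to a setkey callback is typically loaded
 * into a word-aligned field of the dynamic SA with the helper above, e.g.
 *
 *	crypto4xx_memcpy_to_le32(sa_key_field, in_key, keylen);
 *
 * where sa_key_field stands for a __le32 buffer inside the SA and in_key
 * is the byte string supplied by the crypto API.
 */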

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize);
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_encrypt_aes_ccm(struct aead_request *req);
int crypto4xx_decrypt_aes_ccm(struct aead_request *req);
int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_encrypt_aes_gcm(struct aead_request *req);
int crypto4xx_decrypt_aes_gcm(struct aead_request *req);

#endif