// SPDX-License-Identifier: GPL-2.0-only
/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>
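
/*
 * pcrypt turns a wrapped AEAD into an asynchronous one whose requests
 * are spread across CPUs via padata.  A minimal usage sketch (the inner
 * algorithm below is only an example):
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("pcrypt(gcm(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_aead(tfm);
 */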

static struct padata_instance *pencrypt;
static struct padata_instance *pdecrypt;
static struct kset *pcrypt_kset;

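/*
 * Per-instance context: the spawn of the wrapped AEAD plus one padata
 * shell per direction.  tfm_count is used to round-robin callback CPUs
 * across transforms (see pcrypt_aead_init_tfm()).
 */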
struct pcrypt_instance_ctx {
	struct crypto_aead_spawn spawn;
	struct padata_shell *psenc;
	struct padata_shell *psdec;
	atomic_t tfm_count;
};

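/* Per-transform context: the child AEAD and the CPU used for completions. */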
struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};

static inline struct pcrypt_instance_ctx *pcrypt_tfm_ictx(
	struct crypto_aead *tfm)
{
	return aead_instance_ctx(aead_alg_instance(tfm));
}

static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

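/*
 * padata serial callback: runs once the job has been reordered back
 * into submission order; completes the original request.
 */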
static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}

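/*
 * Completion callback of the child request: stash the result and hand
 * the job back to padata for serialization.
 */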
static void pcrypt_aead_done(void *data, int err)
{
	struct aead_request *req = data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;

	padata_do_serial(padata);
}

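/*
 * padata parallel callback: runs the child encryption on the worker
 * CPU.  If the child completes asynchronously, pcrypt_aead_done() will
 * queue the job for serialization instead.
 */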
static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);
	int ret;

	ret = crypto_aead_encrypt(req);

	if (ret == -EINPROGRESS)
		return;

	padata->info = ret;
	padata_do_serial(padata);
}

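/*
 * Entry point for encryption.  The request context reserved in
 * pcrypt_aead_init_tfm() holds a pcrypt_request, the child aead_request
 * and the child's own request context back to back; the child request
 * is prepared here and dispatched through the pencrypt padata instance.
 */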
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);
	struct pcrypt_instance_ctx *ictx;

	ictx = pcrypt_tfm_ictx(aead);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = padata_do_parallel(ictx->psenc, padata, &ctx->cb_cpu);
	if (!err)
		return -EINPROGRESS;
	if (err == -EBUSY)
		return -EAGAIN;

	return err;
}

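/* The decrypt path mirrors the encrypt path, dispatched on pdecrypt. */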
static void pcrypt_aead_dec(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);
	int ret;

	ret = crypto_aead_decrypt(req);

	if (ret == -EINPROGRESS)
		return;

	padata->info = ret;
	padata_do_serial(padata);
}

static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);
	struct pcrypt_instance_ctx *ictx;

	ictx = pcrypt_tfm_ictx(aead);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = padata_do_parallel(ictx->psdec, padata, &ctx->cb_cpu);
	if (!err)
		return -EINPROGRESS;
	if (err == -EBUSY)
		return -EAGAIN;

	return err;
}

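/*
 * Pick a callback CPU for this transform by walking the online mask
 * round-robin, then instantiate the child AEAD and reserve request
 * space for pcrypt_request, the child request and its context.
 */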
static int pcrypt_aead_init_tfm(struct crypto_aead *tfm)
{
	int cpu, cpu_index;
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = aead_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
		    cpumask_weight(cpu_online_mask);

	ctx->cb_cpu = cpumask_first(cpu_online_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

	cipher = crypto_spawn_aead(&ictx->spawn);

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(tfm, sizeof(struct pcrypt_request) +
				     sizeof(struct aead_request) +
				     crypto_aead_reqsize(cipher));

	return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

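/*
 * Instance destructor: used both as inst->free and by the error path
 * of pcrypt_create_aead(), so it must cope with partially set up
 * instances.
 */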
static void pcrypt_free(struct aead_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->spawn);
	padata_free_shell(ctx->psdec);
	padata_free_shell(ctx->psenc);
	kfree(inst);
}

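/*
 * Fill in the generic algorithm fields.  The priority is raised by 100
 * so the pcrypt instance is preferred over the wrapped algorithm.
 */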
static int pcrypt_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

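/*
 * Build a pcrypt AEAD instance: allocate instance plus context in one
 * go, attach a padata shell per direction, grab the wrapped algorithm
 * and register the resulting template instance.
 */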
static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt)
{
	struct pcrypt_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 mask = crypto_algt_inherited_mask(algt);
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	err = -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->psenc = padata_alloc_shell(pencrypt);
	if (!ctx->psenc)
		goto err_free_inst;

	ctx->psdec = padata_alloc_shell(pdecrypt);
	if (!ctx->psdec)
		goto err_free_inst;

	err = crypto_grab_aead(&ctx->spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->spawn);
	err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.init = pcrypt_aead_init_tfm;
	inst->alg.exit = pcrypt_aead_exit_tfm;

	inst->alg.setkey = pcrypt_aead_setkey;
	inst->alg.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.encrypt = pcrypt_aead_encrypt;
	inst->alg.decrypt = pcrypt_aead_decrypt;

	inst->free = pcrypt_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		pcrypt_free(inst);
	}
	return err;
}

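/* Template ->create() hook: only AEAD algorithms can be wrapped. */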
static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_create_aead(tmpl, tb, algt);
	}

	return -EINVAL;
}

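/*
 * Expose the padata instance under /sys/kernel/pcrypt/<name> so its
 * parallel and serial cpumasks can be configured from user space.
 */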
static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
	int ret;

	pinst->kobj.kset = pcrypt_kset;
	ret = kobject_add(&pinst->kobj, NULL, "%s", name);
	if (!ret)
		kobject_uevent(&pinst->kobj, KOBJ_ADD);

	return ret;
}

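/* Allocate one padata instance and hook it up in sysfs. */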
static int pcrypt_init_padata(struct padata_instance **pinst, const char *name)
{
	int ret = -ENOMEM;

	*pinst = padata_alloc(name);
	if (!*pinst)
		return ret;

	ret = pcrypt_sysfs_add(*pinst, name);
	if (ret)
		padata_free(*pinst);

	return ret;
}

static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.create = pcrypt_create,
	.module = THIS_MODULE,
};

static int __init pcrypt_init(void)
{
	int err = -ENOMEM;

	pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
	if (!pcrypt_kset)
		goto err;

	err = pcrypt_init_padata(&pencrypt, "pencrypt");
	if (err)
		goto err_unreg_kset;

	err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
	if (err)
		goto err_deinit_pencrypt;

	return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
	padata_free(pencrypt);
err_unreg_kset:
	kset_unregister(pcrypt_kset);
err:
	return err;
}

static void __exit pcrypt_exit(void)
{
	crypto_unregister_template(&pcrypt_tmpl);

	padata_free(pencrypt);
	padata_free(pdecrypt);

	kset_unregister(pcrypt_kset);
}

subsys_initcall(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");